diff --git a/lib/python3.11/site-packages/__pycache__/typing_extensions.cpython-311.pyc b/lib/python3.11/site-packages/__pycache__/typing_extensions.cpython-311.pyc new file mode 100644 index 0000000..ffd1b0c Binary files /dev/null and b/lib/python3.11/site-packages/__pycache__/typing_extensions.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/conf/urls/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/django/conf/urls/__pycache__/__init__.cpython-311.pyc index 70f2a0a..2235887 100644 Binary files a/lib/python3.11/site-packages/django/conf/urls/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/django/conf/urls/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/contrib/admin/__pycache__/forms.cpython-311.pyc b/lib/python3.11/site-packages/django/contrib/admin/__pycache__/forms.cpython-311.pyc index f8164e4..011eee3 100644 Binary files a/lib/python3.11/site-packages/django/contrib/admin/__pycache__/forms.cpython-311.pyc and b/lib/python3.11/site-packages/django/contrib/admin/__pycache__/forms.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/contrib/admin/migrations/__pycache__/0001_initial.cpython-311.pyc b/lib/python3.11/site-packages/django/contrib/admin/migrations/__pycache__/0001_initial.cpython-311.pyc index 186f605..dfd6a2c 100644 Binary files a/lib/python3.11/site-packages/django/contrib/admin/migrations/__pycache__/0001_initial.cpython-311.pyc and b/lib/python3.11/site-packages/django/contrib/admin/migrations/__pycache__/0001_initial.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/contrib/admin/migrations/__pycache__/0002_logentry_remove_auto_add.cpython-311.pyc b/lib/python3.11/site-packages/django/contrib/admin/migrations/__pycache__/0002_logentry_remove_auto_add.cpython-311.pyc index 9061aa2..3254fcd 100644 Binary files 
a/lib/python3.11/site-packages/django/contrib/admin/migrations/__pycache__/0002_logentry_remove_auto_add.cpython-311.pyc and b/lib/python3.11/site-packages/django/contrib/admin/migrations/__pycache__/0002_logentry_remove_auto_add.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/contrib/admin/migrations/__pycache__/0003_logentry_add_action_flag_choices.cpython-311.pyc b/lib/python3.11/site-packages/django/contrib/admin/migrations/__pycache__/0003_logentry_add_action_flag_choices.cpython-311.pyc index ac0b13d..0eddd2c 100644 Binary files a/lib/python3.11/site-packages/django/contrib/admin/migrations/__pycache__/0003_logentry_add_action_flag_choices.cpython-311.pyc and b/lib/python3.11/site-packages/django/contrib/admin/migrations/__pycache__/0003_logentry_add_action_flag_choices.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/contrib/admin/migrations/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/django/contrib/admin/migrations/__pycache__/__init__.cpython-311.pyc index 03aee7a..4c170f9 100644 Binary files a/lib/python3.11/site-packages/django/contrib/admin/migrations/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/django/contrib/admin/migrations/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/contrib/auth/__pycache__/decorators.cpython-311.pyc b/lib/python3.11/site-packages/django/contrib/auth/__pycache__/decorators.cpython-311.pyc index f391b7b..b4f5d68 100644 Binary files a/lib/python3.11/site-packages/django/contrib/auth/__pycache__/decorators.cpython-311.pyc and b/lib/python3.11/site-packages/django/contrib/auth/__pycache__/decorators.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/contrib/auth/__pycache__/views.cpython-311.pyc b/lib/python3.11/site-packages/django/contrib/auth/__pycache__/views.cpython-311.pyc index ac30572..3ce1447 100644 Binary files 
a/lib/python3.11/site-packages/django/contrib/auth/__pycache__/views.cpython-311.pyc and b/lib/python3.11/site-packages/django/contrib/auth/__pycache__/views.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/contrib/auth/management/commands/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/django/contrib/auth/management/commands/__pycache__/__init__.cpython-311.pyc index 9abdba8..3f1b65a 100644 Binary files a/lib/python3.11/site-packages/django/contrib/auth/management/commands/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/django/contrib/auth/management/commands/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/contrib/auth/management/commands/__pycache__/createsuperuser.cpython-311.pyc b/lib/python3.11/site-packages/django/contrib/auth/management/commands/__pycache__/createsuperuser.cpython-311.pyc index d79d4fe..ef58760 100644 Binary files a/lib/python3.11/site-packages/django/contrib/auth/management/commands/__pycache__/createsuperuser.cpython-311.pyc and b/lib/python3.11/site-packages/django/contrib/auth/management/commands/__pycache__/createsuperuser.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0001_initial.cpython-311.pyc b/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0001_initial.cpython-311.pyc index 591877a..3956893 100644 Binary files a/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0001_initial.cpython-311.pyc and b/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0001_initial.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0002_alter_permission_name_max_length.cpython-311.pyc b/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0002_alter_permission_name_max_length.cpython-311.pyc index a6cd5c0..5cb0366 100644 Binary files 
a/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0002_alter_permission_name_max_length.cpython-311.pyc and b/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0002_alter_permission_name_max_length.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0003_alter_user_email_max_length.cpython-311.pyc b/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0003_alter_user_email_max_length.cpython-311.pyc index 0baab25..4896538 100644 Binary files a/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0003_alter_user_email_max_length.cpython-311.pyc and b/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0003_alter_user_email_max_length.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0004_alter_user_username_opts.cpython-311.pyc b/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0004_alter_user_username_opts.cpython-311.pyc index 17932f7..6f4203b 100644 Binary files a/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0004_alter_user_username_opts.cpython-311.pyc and b/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0004_alter_user_username_opts.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0005_alter_user_last_login_null.cpython-311.pyc b/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0005_alter_user_last_login_null.cpython-311.pyc index 2e8efae..733c52b 100644 Binary files a/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0005_alter_user_last_login_null.cpython-311.pyc and b/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0005_alter_user_last_login_null.cpython-311.pyc differ diff --git 
a/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0006_require_contenttypes_0002.cpython-311.pyc b/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0006_require_contenttypes_0002.cpython-311.pyc index 40e89bc..33c81dc 100644 Binary files a/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0006_require_contenttypes_0002.cpython-311.pyc and b/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0006_require_contenttypes_0002.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0007_alter_validators_add_error_messages.cpython-311.pyc b/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0007_alter_validators_add_error_messages.cpython-311.pyc index 108804d..2911379 100644 Binary files a/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0007_alter_validators_add_error_messages.cpython-311.pyc and b/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0007_alter_validators_add_error_messages.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0008_alter_user_username_max_length.cpython-311.pyc b/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0008_alter_user_username_max_length.cpython-311.pyc index adca585..03c2873 100644 Binary files a/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0008_alter_user_username_max_length.cpython-311.pyc and b/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0008_alter_user_username_max_length.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0009_alter_user_last_name_max_length.cpython-311.pyc b/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0009_alter_user_last_name_max_length.cpython-311.pyc index 473fa18..e2ff853 100644 Binary 
files a/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0009_alter_user_last_name_max_length.cpython-311.pyc and b/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0009_alter_user_last_name_max_length.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0010_alter_group_name_max_length.cpython-311.pyc b/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0010_alter_group_name_max_length.cpython-311.pyc index 8ba01ee..b06c151 100644 Binary files a/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0010_alter_group_name_max_length.cpython-311.pyc and b/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0010_alter_group_name_max_length.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0011_update_proxy_permissions.cpython-311.pyc b/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0011_update_proxy_permissions.cpython-311.pyc index 78b7163..40dc27f 100644 Binary files a/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0011_update_proxy_permissions.cpython-311.pyc and b/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0011_update_proxy_permissions.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0012_alter_user_first_name_max_length.cpython-311.pyc b/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0012_alter_user_first_name_max_length.cpython-311.pyc index 5989ff7..3b392e9 100644 Binary files a/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0012_alter_user_first_name_max_length.cpython-311.pyc and b/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/0012_alter_user_first_name_max_length.cpython-311.pyc differ diff --git 
a/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/__init__.cpython-311.pyc index e71566c..bc7f9ed 100644 Binary files a/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/django/contrib/auth/migrations/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/contrib/contenttypes/migrations/__pycache__/0001_initial.cpython-311.pyc b/lib/python3.11/site-packages/django/contrib/contenttypes/migrations/__pycache__/0001_initial.cpython-311.pyc index 68783f9..fa9a9b8 100644 Binary files a/lib/python3.11/site-packages/django/contrib/contenttypes/migrations/__pycache__/0001_initial.cpython-311.pyc and b/lib/python3.11/site-packages/django/contrib/contenttypes/migrations/__pycache__/0001_initial.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/contrib/contenttypes/migrations/__pycache__/0002_remove_content_type_name.cpython-311.pyc b/lib/python3.11/site-packages/django/contrib/contenttypes/migrations/__pycache__/0002_remove_content_type_name.cpython-311.pyc index 6c1246d..3b35507 100644 Binary files a/lib/python3.11/site-packages/django/contrib/contenttypes/migrations/__pycache__/0002_remove_content_type_name.cpython-311.pyc and b/lib/python3.11/site-packages/django/contrib/contenttypes/migrations/__pycache__/0002_remove_content_type_name.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/contrib/contenttypes/migrations/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/django/contrib/contenttypes/migrations/__pycache__/__init__.cpython-311.pyc index a7ee784..d0b90b7 100644 Binary files a/lib/python3.11/site-packages/django/contrib/contenttypes/migrations/__pycache__/__init__.cpython-311.pyc and 
b/lib/python3.11/site-packages/django/contrib/contenttypes/migrations/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/contrib/sessions/migrations/__pycache__/0001_initial.cpython-311.pyc b/lib/python3.11/site-packages/django/contrib/sessions/migrations/__pycache__/0001_initial.cpython-311.pyc index c18566d..19f2d77 100644 Binary files a/lib/python3.11/site-packages/django/contrib/sessions/migrations/__pycache__/0001_initial.cpython-311.pyc and b/lib/python3.11/site-packages/django/contrib/sessions/migrations/__pycache__/0001_initial.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/contrib/sessions/migrations/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/django/contrib/sessions/migrations/__pycache__/__init__.cpython-311.pyc index 03fccc4..ce95b2d 100644 Binary files a/lib/python3.11/site-packages/django/contrib/sessions/migrations/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/django/contrib/sessions/migrations/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/core/cache/backends/__pycache__/locmem.cpython-311.pyc b/lib/python3.11/site-packages/django/core/cache/backends/__pycache__/locmem.cpython-311.pyc index 7942144..14f5b8d 100644 Binary files a/lib/python3.11/site-packages/django/core/cache/backends/__pycache__/locmem.cpython-311.pyc and b/lib/python3.11/site-packages/django/core/cache/backends/__pycache__/locmem.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/core/mail/backends/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/django/core/mail/backends/__pycache__/__init__.cpython-311.pyc index e479e39..7c88da0 100644 Binary files a/lib/python3.11/site-packages/django/core/mail/backends/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/django/core/mail/backends/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/lib/python3.11/site-packages/django/core/mail/backends/__pycache__/base.cpython-311.pyc b/lib/python3.11/site-packages/django/core/mail/backends/__pycache__/base.cpython-311.pyc index 67ca205..f6abebb 100644 Binary files a/lib/python3.11/site-packages/django/core/mail/backends/__pycache__/base.cpython-311.pyc and b/lib/python3.11/site-packages/django/core/mail/backends/__pycache__/base.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/core/mail/backends/__pycache__/smtp.cpython-311.pyc b/lib/python3.11/site-packages/django/core/mail/backends/__pycache__/smtp.cpython-311.pyc index c53c579..a93c499 100644 Binary files a/lib/python3.11/site-packages/django/core/mail/backends/__pycache__/smtp.cpython-311.pyc and b/lib/python3.11/site-packages/django/core/mail/backends/__pycache__/smtp.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/__init__.cpython-311.pyc index 4df5504..c41fa5b 100644 Binary files a/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/base.cpython-311.pyc b/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/base.cpython-311.pyc index 481eb06..664cc0b 100644 Binary files a/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/base.cpython-311.pyc and b/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/base.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/client.cpython-311.pyc b/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/client.cpython-311.pyc index 2808de4..3a24939 100644 Binary files 
a/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/client.cpython-311.pyc and b/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/client.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/creation.cpython-311.pyc b/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/creation.cpython-311.pyc index 41920b1..faf9c5a 100644 Binary files a/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/creation.cpython-311.pyc and b/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/creation.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/features.cpython-311.pyc b/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/features.cpython-311.pyc index fec2180..c0f2daa 100644 Binary files a/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/features.cpython-311.pyc and b/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/features.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/introspection.cpython-311.pyc b/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/introspection.cpython-311.pyc index aefde36..ea301d7 100644 Binary files a/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/introspection.cpython-311.pyc and b/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/introspection.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/operations.cpython-311.pyc b/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/operations.cpython-311.pyc index c9d52fe..77035eb 100644 Binary files a/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/operations.cpython-311.pyc and 
b/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/operations.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/psycopg_any.cpython-311.pyc b/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/psycopg_any.cpython-311.pyc index 71144c7..869dffe 100644 Binary files a/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/psycopg_any.cpython-311.pyc and b/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/psycopg_any.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/schema.cpython-311.pyc b/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/schema.cpython-311.pyc index 282deaf..1e49bff 100644 Binary files a/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/schema.cpython-311.pyc and b/lib/python3.11/site-packages/django/db/backends/postgresql/__pycache__/schema.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/db/migrations/__pycache__/executor.cpython-311.pyc b/lib/python3.11/site-packages/django/db/migrations/__pycache__/executor.cpython-311.pyc index 43368d5..2b40386 100644 Binary files a/lib/python3.11/site-packages/django/db/migrations/__pycache__/executor.cpython-311.pyc and b/lib/python3.11/site-packages/django/db/migrations/__pycache__/executor.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/db/migrations/__pycache__/graph.cpython-311.pyc b/lib/python3.11/site-packages/django/db/migrations/__pycache__/graph.cpython-311.pyc index bf5a9b0..b566f61 100644 Binary files a/lib/python3.11/site-packages/django/db/migrations/__pycache__/graph.cpython-311.pyc and b/lib/python3.11/site-packages/django/db/migrations/__pycache__/graph.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/db/migrations/__pycache__/loader.cpython-311.pyc 
b/lib/python3.11/site-packages/django/db/migrations/__pycache__/loader.cpython-311.pyc index 0f9814e..560123b 100644 Binary files a/lib/python3.11/site-packages/django/db/migrations/__pycache__/loader.cpython-311.pyc and b/lib/python3.11/site-packages/django/db/migrations/__pycache__/loader.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/db/migrations/__pycache__/recorder.cpython-311.pyc b/lib/python3.11/site-packages/django/db/migrations/__pycache__/recorder.cpython-311.pyc index f0951cb..b5b0d44 100644 Binary files a/lib/python3.11/site-packages/django/db/migrations/__pycache__/recorder.cpython-311.pyc and b/lib/python3.11/site-packages/django/db/migrations/__pycache__/recorder.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/views/__pycache__/csrf.cpython-311.pyc b/lib/python3.11/site-packages/django/views/__pycache__/csrf.cpython-311.pyc index 31ad117..6bb7ed6 100644 Binary files a/lib/python3.11/site-packages/django/views/__pycache__/csrf.cpython-311.pyc and b/lib/python3.11/site-packages/django/views/__pycache__/csrf.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/django/views/__pycache__/defaults.cpython-311.pyc b/lib/python3.11/site-packages/django/views/__pycache__/defaults.cpython-311.pyc index 086dfb0..968ac59 100644 Binary files a/lib/python3.11/site-packages/django/views/__pycache__/defaults.cpython-311.pyc and b/lib/python3.11/site-packages/django/views/__pycache__/defaults.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip-23.2.dist-info/AUTHORS.txt b/lib/python3.11/site-packages/pip-23.2.1.dist-info/AUTHORS.txt similarity index 99% rename from lib/python3.11/site-packages/pip-23.2.dist-info/AUTHORS.txt rename to lib/python3.11/site-packages/pip-23.2.1.dist-info/AUTHORS.txt index 299459f..77eb39a 100644 --- a/lib/python3.11/site-packages/pip-23.2.dist-info/AUTHORS.txt +++ b/lib/python3.11/site-packages/pip-23.2.1.dist-info/AUTHORS.txt @@ -446,6 +446,7 @@ Matthew Einhorn 
Matthew Feickert Matthew Gilliard Matthew Iversen +Matthew Treinish Matthew Trumbell Matthew Willson Matthias Bussonnier diff --git a/lib/python3.11/site-packages/pip-23.2.dist-info/INSTALLER b/lib/python3.11/site-packages/pip-23.2.1.dist-info/INSTALLER similarity index 100% rename from lib/python3.11/site-packages/pip-23.2.dist-info/INSTALLER rename to lib/python3.11/site-packages/pip-23.2.1.dist-info/INSTALLER diff --git a/lib/python3.11/site-packages/pip-23.2.dist-info/LICENSE.txt b/lib/python3.11/site-packages/pip-23.2.1.dist-info/LICENSE.txt similarity index 100% rename from lib/python3.11/site-packages/pip-23.2.dist-info/LICENSE.txt rename to lib/python3.11/site-packages/pip-23.2.1.dist-info/LICENSE.txt diff --git a/lib/python3.11/site-packages/pip-23.2.dist-info/METADATA b/lib/python3.11/site-packages/pip-23.2.1.dist-info/METADATA similarity index 97% rename from lib/python3.11/site-packages/pip-23.2.dist-info/METADATA rename to lib/python3.11/site-packages/pip-23.2.1.dist-info/METADATA index b9e64e8..c503b33 100644 --- a/lib/python3.11/site-packages/pip-23.2.dist-info/METADATA +++ b/lib/python3.11/site-packages/pip-23.2.1.dist-info/METADATA @@ -1,6 +1,6 @@ Metadata-Version: 2.1 Name: pip -Version: 23.2 +Version: 23.2.1 Summary: The PyPA recommended tool for installing Python packages. 
Home-page: https://pip.pypa.io/ Author: The pip developers diff --git a/lib/python3.11/site-packages/pip-23.2.dist-info/RECORD b/lib/python3.11/site-packages/pip-23.2.1.dist-info/RECORD similarity index 97% rename from lib/python3.11/site-packages/pip-23.2.dist-info/RECORD rename to lib/python3.11/site-packages/pip-23.2.1.dist-info/RECORD index b9b402c..00c7e90 100644 --- a/lib/python3.11/site-packages/pip-23.2.dist-info/RECORD +++ b/lib/python3.11/site-packages/pip-23.2.1.dist-info/RECORD @@ -1,16 +1,16 @@ ../../../bin/pip,sha256=trvaNchvvmdPIgnta9U_X9O4VgEwL_0VuDPU8uKIADw,240 ../../../bin/pip3,sha256=trvaNchvvmdPIgnta9U_X9O4VgEwL_0VuDPU8uKIADw,240 ../../../bin/pip3.11,sha256=trvaNchvvmdPIgnta9U_X9O4VgEwL_0VuDPU8uKIADw,240 -pip-23.2.dist-info/AUTHORS.txt,sha256=9IQBZZpwYXOsvuOsbgi3ytH_J00qkFSRMAmwfSpXV1c,10065 -pip-23.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pip-23.2.dist-info/LICENSE.txt,sha256=Y0MApmnUmurmWxLGxIySTFGkzfPR_whtw0VtyLyqIQQ,1093 -pip-23.2.dist-info/METADATA,sha256=RCOzPh71ebcrKz_mjt32A7o8d4ss0X3ZiZlPvGTIoOs,4237 -pip-23.2.dist-info/RECORD,, -pip-23.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip-23.2.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92 -pip-23.2.dist-info/entry_points.txt,sha256=xg35gOct0aY8S3ftLtweJ0uw3KBAIVyW4k-0Jx1rkNE,125 -pip-23.2.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pip/__init__.py,sha256=kGP5CkhRNi6gWTpwTuWW9fGo6H785hpjAB-lzJDObMY,355 +pip-23.2.1.dist-info/AUTHORS.txt,sha256=Pd_qYtjluu4WDft2A179dPtIvwYVBNtDfccCitVRMQM,10082 +pip-23.2.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pip-23.2.1.dist-info/LICENSE.txt,sha256=Y0MApmnUmurmWxLGxIySTFGkzfPR_whtw0VtyLyqIQQ,1093 +pip-23.2.1.dist-info/METADATA,sha256=yHPLQvsD1b6f-zdCQWMibZXbsAjs886JMSh3C0oxRhQ,4239 +pip-23.2.1.dist-info/RECORD,, +pip-23.2.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 
+pip-23.2.1.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92 +pip-23.2.1.dist-info/entry_points.txt,sha256=xg35gOct0aY8S3ftLtweJ0uw3KBAIVyW4k-0Jx1rkNE,125 +pip-23.2.1.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pip/__init__.py,sha256=hELWH3UN2ilBntczbn1BJOIzJEoiE8w9H-gsR5TeuEk,357 pip/__main__.py,sha256=WzbhHXTbSE6gBY19mNN9m4s5o_365LOvTYSgqgbdBhE,854 pip/__pip-runner__.py,sha256=EnrfKmKMzWAdqg_JicLCOP9Y95Ux7zHh4ObvqLtQcjo,1444 pip/__pycache__/__init__.cpython-311.pyc,, @@ -49,7 +49,7 @@ pip/_internal/cli/main.py,sha256=Uzxt_YD1hIvB1AW5mxt6IVcht5G712AtMqdo51UMhmQ,281 pip/_internal/cli/main_parser.py,sha256=laDpsuBDl6kyfywp9eMMA9s84jfH2TJJn-vmL0GG90w,4338 pip/_internal/cli/parser.py,sha256=tWP-K1uSxnJyXu3WE0kkH3niAYRBeuUaxeydhzOdhL4,10817 pip/_internal/cli/progress_bars.py,sha256=So4mPoSjXkXiSHiTzzquH3VVyVD_njXlHJSExYPXAow,1968 -pip/_internal/cli/req_command.py,sha256=XajWVmfnVs3LCuXea9cXq9LH76z7uau_4gYH-ykAtto,18328 +pip/_internal/cli/req_command.py,sha256=GqS9jkeHktOy6zRzC6uhcRY7SelnAV1LZ6OfS_gNcEk,18440 pip/_internal/cli/spinners.py,sha256=hIJ83GerdFgFCdobIA23Jggetegl_uC4Sp586nzFbPE,5118 pip/_internal/cli/status_codes.py,sha256=sEFHUaUJbqv8iArL3HAtcztWZmGOFX01hTesSytDEh0,116 pip/_internal/commands/__init__.py,sha256=5oRO9O3dM2vGuh0bFw4HOVletryrz5HHMmmPWwJrH9U,3882 @@ -170,7 +170,7 @@ pip/_internal/network/auth.py,sha256=TC-OcW2KU4W6R1hU4qPgQXvVH54adACpZz6sWq-R9NA pip/_internal/network/cache.py,sha256=hgXftU-eau4MWxHSLquTMzepYq5BPC2zhCkhN3glBy8,2145 pip/_internal/network/download.py,sha256=HvDDq9bVqaN3jcS3DyVJHP7uTqFzbShdkf7NFSoHfkw,6096 pip/_internal/network/lazy_wheel.py,sha256=2PXVduYZPCPZkkQFe1J1GbfHJWeCU--FXonGyIfw9eU,7638 -pip/_internal/network/session.py,sha256=J36m7MhXDI20J91izrQQRjZSSpe0LIJVjPWuYgIfKr0,18442 +pip/_internal/network/session.py,sha256=uhovd4J7abd0Yr2g426yC4aC6Uw1VKrQfpzalsEBEMw,18607 pip/_internal/network/utils.py,sha256=6A5SrUJEEUHxbGtbscwU2NpCyz-3ztiDlGWHpRRhsJ8,4073 
pip/_internal/network/xmlrpc.py,sha256=AzQgG4GgS152_cqmGr_Oz2MIXsCal-xfsis7fA7nmU0,1791 pip/_internal/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 @@ -202,7 +202,7 @@ pip/_internal/operations/install/__pycache__/editable_legacy.cpython-311.pyc,, pip/_internal/operations/install/__pycache__/wheel.cpython-311.pyc,, pip/_internal/operations/install/editable_legacy.py,sha256=YeR0KadWXw_ZheC1NtAG1qVIEkOgRGHc23x-YtGW7NU,1282 pip/_internal/operations/install/wheel.py,sha256=8lsVMt_FAuiGNsf_e7C7_cCSOEO7pHyjgVmRNx-WXrw,27475 -pip/_internal/operations/prepare.py,sha256=NF_ErIXh9yxxw_nhdeagAj5WqMp1tDpP3TErPNonm8g,28571 +pip/_internal/operations/prepare.py,sha256=nxjIiGRSiUUSRFpwN-Qro7N6BE9jqV4mudJ7CIv9qwY,28868 pip/_internal/pyproject.py,sha256=ltmrXWaMXjiJHbYyzWplTdBvPYPdKk99GjKuQVypGZU,7161 pip/_internal/req/__init__.py,sha256=TELFgZOof3lhMmaICVWL9U7PlhXo9OufokbMAJ6J2GI,2738 pip/_internal/req/__pycache__/__init__.cpython-311.pyc,, @@ -289,7 +289,7 @@ pip/_internal/utils/glibc.py,sha256=Mesxxgg3BLxheLZx-dSf30b6gKpOgdVXw6W--uHSszQ, pip/_internal/utils/hashes.py,sha256=MjOigC75z6qoRMkgHiHqot7eqxfwDZSrEflJMPm-bHE,5118 pip/_internal/utils/inject_securetransport.py,sha256=o-QRVMGiENrTJxw3fAhA7uxpdEdw6M41TjHYtSVRrcg,795 pip/_internal/utils/logging.py,sha256=U2q0i1n8hPS2gQh8qcocAg5dovGAa_bR24akmXMzrk4,11632 -pip/_internal/utils/misc.py,sha256=Ldna7NxC8dPR7r-DnbQxg_RNJ2GraVv9MBxoY3m8oD8,22377 +pip/_internal/utils/misc.py,sha256=Ds3rSQU7HbdAywwmEBcPnVoLB1Tp_2gL6IbaWcpe8i0,22343 pip/_internal/utils/models.py,sha256=5GoYU586SrxURMvDn_jBMJInitviJg4O5-iOU-6I0WY,1193 pip/_internal/utils/packaging.py,sha256=5Wm6_x7lKrlqVjPI5MBN_RurcRHwVYoQ7Ksrs84de7s,2108 pip/_internal/utils/setuptools_build.py,sha256=ouXpud-jeS8xPyTPsXJ-m34NPvK5os45otAzdSV_IJE,4435 diff --git a/lib/python3.11/site-packages/pip-23.2.dist-info/REQUESTED b/lib/python3.11/site-packages/pip-23.2.1.dist-info/REQUESTED similarity index 100% rename from 
lib/python3.11/site-packages/pip-23.2.dist-info/REQUESTED rename to lib/python3.11/site-packages/pip-23.2.1.dist-info/REQUESTED diff --git a/lib/python3.11/site-packages/pip-23.2.dist-info/WHEEL b/lib/python3.11/site-packages/pip-23.2.1.dist-info/WHEEL similarity index 100% rename from lib/python3.11/site-packages/pip-23.2.dist-info/WHEEL rename to lib/python3.11/site-packages/pip-23.2.1.dist-info/WHEEL diff --git a/lib/python3.11/site-packages/pip-23.2.dist-info/entry_points.txt b/lib/python3.11/site-packages/pip-23.2.1.dist-info/entry_points.txt similarity index 100% rename from lib/python3.11/site-packages/pip-23.2.dist-info/entry_points.txt rename to lib/python3.11/site-packages/pip-23.2.1.dist-info/entry_points.txt diff --git a/lib/python3.11/site-packages/pip-23.2.dist-info/top_level.txt b/lib/python3.11/site-packages/pip-23.2.1.dist-info/top_level.txt similarity index 100% rename from lib/python3.11/site-packages/pip-23.2.dist-info/top_level.txt rename to lib/python3.11/site-packages/pip-23.2.1.dist-info/top_level.txt diff --git a/lib/python3.11/site-packages/pip/__init__.py b/lib/python3.11/site-packages/pip/__init__.py index 696541d..6633ef7 100644 --- a/lib/python3.11/site-packages/pip/__init__.py +++ b/lib/python3.11/site-packages/pip/__init__.py @@ -1,6 +1,6 @@ from typing import List, Optional -__version__ = "23.2" +__version__ = "23.2.1" def main(args: Optional[List[str]] = None) -> int: diff --git a/lib/python3.11/site-packages/pip/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/__pycache__/__init__.cpython-311.pyc index 7cb920c..9f2ee96 100644 Binary files a/lib/python3.11/site-packages/pip/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/__pycache__/__main__.cpython-311.pyc b/lib/python3.11/site-packages/pip/__pycache__/__main__.cpython-311.pyc index 6bb788e..cb873ba 100644 Binary files 
a/lib/python3.11/site-packages/pip/__pycache__/__main__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/__pycache__/__main__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/__pycache__/__pip-runner__.cpython-311.pyc b/lib/python3.11/site-packages/pip/__pycache__/__pip-runner__.cpython-311.pyc index c2b35e1..454f25b 100644 Binary files a/lib/python3.11/site-packages/pip/__pycache__/__pip-runner__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/__pycache__/__pip-runner__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/__pycache__/__init__.cpython-311.pyc index 5748dc2..e438f9d 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/__pycache__/build_env.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/__pycache__/build_env.cpython-311.pyc index 81f75bc..631f111 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/__pycache__/build_env.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/__pycache__/build_env.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/__pycache__/cache.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/__pycache__/cache.cpython-311.pyc index 3421ce0..a0b1dad 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/__pycache__/cache.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/__pycache__/cache.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/__pycache__/configuration.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/__pycache__/configuration.cpython-311.pyc index a6e108c..0a25477 100644 Binary files 
a/lib/python3.11/site-packages/pip/_internal/__pycache__/configuration.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/__pycache__/configuration.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/__pycache__/exceptions.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/__pycache__/exceptions.cpython-311.pyc index f92b5ad..2e47ac1 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/__pycache__/exceptions.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/__pycache__/exceptions.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/__pycache__/main.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/__pycache__/main.cpython-311.pyc index 7b780b1..d4bf8e5 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/__pycache__/main.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/__pycache__/main.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/__pycache__/pyproject.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/__pycache__/pyproject.cpython-311.pyc index 40d2077..932b912 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/__pycache__/pyproject.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/__pycache__/pyproject.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-311.pyc index da3a884..f5199c8 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-311.pyc index 
1aee237..a995d8a 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-311.pyc index ff2f5da..a07e02a 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-311.pyc index ee2de18..db41456 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-311.pyc index c025b53..fc7978c 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-311.pyc index 49dec06..73b113c 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-311.pyc differ diff --git 
a/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-311.pyc index c749bee..c9e8e0a 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/main.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/main.cpython-311.pyc index ce8273f..78b84e9 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/main.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/main.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-311.pyc index b0417d2..7e835d2 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/parser.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/parser.cpython-311.pyc index 00d42ea..dc1b435 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/parser.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/parser.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-311.pyc index cd55bce..0cb0b42 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-311.pyc and 
b/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-311.pyc index 932a500..3cd186f 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-311.pyc index daf50e8..78062b9 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-311.pyc index f9bb354..10f035f 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/cli/req_command.py b/lib/python3.11/site-packages/pip/_internal/cli/req_command.py index c2f4e38..86070f1 100644 --- a/lib/python3.11/site-packages/pip/_internal/cli/req_command.py +++ b/lib/python3.11/site-packages/pip/_internal/cli/req_command.py @@ -287,6 +287,7 @@ class RequirementCommand(IndexGroupCommand): """ temp_build_dir_path = temp_build_dir.path assert temp_build_dir_path is not None + legacy_resolver = False resolver_variant = cls.determine_resolver_variant(options) if resolver_variant == "2020-resolver": @@ -300,6 +301,7 @@ class RequirementCommand(IndexGroupCommand): 
"production." ) else: + legacy_resolver = True lazy_wheel = False if "fast-deps" in options.features_enabled: logger.warning( @@ -320,6 +322,7 @@ class RequirementCommand(IndexGroupCommand): use_user_site=use_user_site, lazy_wheel=lazy_wheel, verbosity=verbosity, + legacy_resolver=legacy_resolver, ) @classmethod diff --git a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-311.pyc index 3af4202..7870775 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/cache.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/cache.cpython-311.pyc index dfcc135..26618bb 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/cache.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/cache.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/check.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/check.cpython-311.pyc index 1b636ef..271e56e 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/check.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/check.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/completion.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/completion.cpython-311.pyc index 298f736..4b97f5e 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/completion.cpython-311.pyc and 
b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/completion.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-311.pyc index d925498..3b5b9af 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/debug.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/debug.cpython-311.pyc index eaf0ae5..cf809b8 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/debug.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/debug.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/download.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/download.cpython-311.pyc index 3136375..343c949 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/download.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/download.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-311.pyc index 9b3df37..eea38a1 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/hash.cpython-311.pyc 
b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/hash.cpython-311.pyc index aed60b1..f3e44be 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/hash.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/hash.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/help.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/help.cpython-311.pyc index 035f383..eb1ede6 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/help.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/help.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/index.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/index.cpython-311.pyc index e4403bf..feb097e 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/index.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/index.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/inspect.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/inspect.cpython-311.pyc index dd8efdf..569cefe 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/inspect.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/inspect.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/install.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/install.cpython-311.pyc index 1904849..3b47ae2 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/install.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/install.cpython-311.pyc differ diff 
--git a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/list.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/list.cpython-311.pyc index 7838bf0..8943613 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/list.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/list.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/search.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/search.cpython-311.pyc index 3842f3b..3957488 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/search.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/search.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/show.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/show.cpython-311.pyc index 627fd62..6dcee45 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/show.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/show.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-311.pyc index 5991b7e..b8b1725 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-311.pyc index 2959e89..8399e07 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-311.pyc and 
b/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-311.pyc index 427134f..0bdd847 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/base.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/base.cpython-311.pyc index 5aaf91f..353d4b4 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/base.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/base.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-311.pyc index c72cd5f..3f49964 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-311.pyc index b912ed1..43ce343 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-311.pyc 
b/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-311.pyc index eafff0d..de2bc85 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/index/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/index/__pycache__/__init__.cpython-311.pyc index 6cab8c6..f0c4545 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/index/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/index/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/index/__pycache__/collector.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/index/__pycache__/collector.cpython-311.pyc index 44cc706..c134ae7 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/index/__pycache__/collector.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/index/__pycache__/collector.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-311.pyc index fff7418..50f88f1 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/index/__pycache__/sources.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/index/__pycache__/sources.cpython-311.pyc index a079e7b..12bb76d 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/index/__pycache__/sources.cpython-311.pyc and 
b/lib/python3.11/site-packages/pip/_internal/index/__pycache__/sources.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-311.pyc index 7e8f890..e68d988 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-311.pyc index fd8aa88..a83611b 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-311.pyc index f706f57..d6f2683 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/locations/__pycache__/base.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/locations/__pycache__/base.cpython-311.pyc index 5d7ffa3..0043fd1 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/locations/__pycache__/base.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/locations/__pycache__/base.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-311.pyc 
b/lib/python3.11/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-311.pyc index bc50cd7..efba712 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/metadata/__pycache__/_json.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/metadata/__pycache__/_json.cpython-311.pyc index 34414ff..17aa583 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/metadata/__pycache__/_json.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/metadata/__pycache__/_json.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/metadata/__pycache__/base.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/metadata/__pycache__/base.cpython-311.pyc index 9476d9e..0e1b308 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/metadata/__pycache__/base.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/metadata/__pycache__/base.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-311.pyc index 5b8ae95..6563a0c 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/__init__.cpython-311.pyc index 6e1a3f3..fa10763 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/__init__.cpython-311.pyc and 
b/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/_compat.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/_compat.cpython-311.pyc index c667f35..4b0537c 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/_compat.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/_compat.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/_dists.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/_dists.cpython-311.pyc index 43992ea..12e7657 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/_dists.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/_dists.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/_envs.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/_envs.cpython-311.pyc index 6355e6a..bd61cb4 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/_envs.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/_envs.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/models/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/models/__pycache__/__init__.cpython-311.pyc index 16e5fb8..be28c9e 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/models/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/models/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/lib/python3.11/site-packages/pip/_internal/models/__pycache__/candidate.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/models/__pycache__/candidate.cpython-311.pyc index e23caa6..7b121c2 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/models/__pycache__/candidate.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/models/__pycache__/candidate.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-311.pyc index 9bbf09e..c5463ea 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/models/__pycache__/format_control.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/models/__pycache__/format_control.cpython-311.pyc index 6c87a54..a2cb6fa 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/models/__pycache__/format_control.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/models/__pycache__/format_control.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/models/__pycache__/index.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/models/__pycache__/index.cpython-311.pyc index 9325083..3fec879 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/models/__pycache__/index.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/models/__pycache__/index.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/models/__pycache__/installation_report.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/models/__pycache__/installation_report.cpython-311.pyc index 123547e..3470524 100644 Binary files 
a/lib/python3.11/site-packages/pip/_internal/models/__pycache__/installation_report.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/models/__pycache__/installation_report.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/models/__pycache__/link.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/models/__pycache__/link.cpython-311.pyc index f9b1016..09ba76e 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/models/__pycache__/link.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/models/__pycache__/link.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/models/__pycache__/scheme.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/models/__pycache__/scheme.cpython-311.pyc index 2c8d845..35cc631 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/models/__pycache__/scheme.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/models/__pycache__/scheme.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-311.pyc index b3edff2..00aed3c 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-311.pyc index 9e6ed07..1a594b6 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-311.pyc differ diff --git 
a/lib/python3.11/site-packages/pip/_internal/models/__pycache__/target_python.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/models/__pycache__/target_python.cpython-311.pyc index 314b01b..b5a6606 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/models/__pycache__/target_python.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/models/__pycache__/target_python.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/models/__pycache__/wheel.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/models/__pycache__/wheel.cpython-311.pyc index 6eee8a1..b0ce05f 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/models/__pycache__/wheel.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/models/__pycache__/wheel.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/network/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/network/__pycache__/__init__.cpython-311.pyc index 4738407..802d22f 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/network/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/network/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/network/__pycache__/auth.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/network/__pycache__/auth.cpython-311.pyc index 6b8ec75..4d87032 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/network/__pycache__/auth.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/network/__pycache__/auth.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/network/__pycache__/cache.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/network/__pycache__/cache.cpython-311.pyc index 07755ca..566c315 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/network/__pycache__/cache.cpython-311.pyc and 
b/lib/python3.11/site-packages/pip/_internal/network/__pycache__/cache.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/network/__pycache__/download.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/network/__pycache__/download.cpython-311.pyc index ee5017e..227a40f 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/network/__pycache__/download.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/network/__pycache__/download.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-311.pyc index 9f3b31f..c763a43 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/network/__pycache__/session.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/network/__pycache__/session.cpython-311.pyc index c6d10d5..19c8304 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/network/__pycache__/session.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/network/__pycache__/session.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/network/__pycache__/utils.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/network/__pycache__/utils.cpython-311.pyc index ad088d6..ad52145 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/network/__pycache__/utils.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/network/__pycache__/utils.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-311.pyc index aec0cf7..5591197 
100644 Binary files a/lib/python3.11/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/network/session.py b/lib/python3.11/site-packages/pip/_internal/network/session.py index 6c40ade..887dc14 100644 --- a/lib/python3.11/site-packages/pip/_internal/network/session.py +++ b/lib/python3.11/site-packages/pip/_internal/network/session.py @@ -419,15 +419,17 @@ class PipSession(requests.Session): msg += f" (from {source})" logger.info(msg) - host_port = parse_netloc(host) - if host_port not in self.pip_trusted_origins: - self.pip_trusted_origins.append(host_port) + parsed_host, parsed_port = parse_netloc(host) + if parsed_host is None: + raise ValueError(f"Trusted host URL must include a host part: {host!r}") + if (parsed_host, parsed_port) not in self.pip_trusted_origins: + self.pip_trusted_origins.append((parsed_host, parsed_port)) self.mount( build_url_from_netloc(host, scheme="http") + "/", self._trusted_host_adapter ) self.mount(build_url_from_netloc(host) + "/", self._trusted_host_adapter) - if not host_port[1]: + if not parsed_port: self.mount( build_url_from_netloc(host, scheme="http") + ":", self._trusted_host_adapter, diff --git a/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-311.pyc index 22cc562..c6a6139 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/check.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/check.cpython-311.pyc index 6499fcf..8ef242c 100644 Binary files 
a/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/check.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/check.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-311.pyc index d9b50cc..2acda6c 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-311.pyc index ecf094f..83f2c59 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-311.pyc index 53e4461..3c6c7c8 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/build_tracker.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/build_tracker.cpython-311.pyc index e12fc67..a617174 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/build_tracker.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/build_tracker.cpython-311.pyc differ diff 
--git a/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-311.pyc index 6b45131..1ddcddd 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-311.pyc index 26799b4..269ab53 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-311.pyc index 7284f37..a14b17b 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-311.pyc index 3255ee8..74596cc 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-311.pyc differ diff --git 
a/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-311.pyc index 832a618..64c5b50 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-311.pyc index 81c4d50..043e108 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-311.pyc index 4bb94c9..5d3c0fa 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-311.pyc index 27c667c..c84e339 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-311.pyc differ diff --git 
a/lib/python3.11/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-311.pyc index 1630350..978b3a6 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/operations/prepare.py b/lib/python3.11/site-packages/pip/_internal/operations/prepare.py index 49d8626..cb121bc 100644 --- a/lib/python3.11/site-packages/pip/_internal/operations/prepare.py +++ b/lib/python3.11/site-packages/pip/_internal/operations/prepare.py @@ -226,6 +226,7 @@ class RequirementPreparer: use_user_site: bool, lazy_wheel: bool, verbosity: int, + legacy_resolver: bool, ) -> None: super().__init__() @@ -259,6 +260,9 @@ class RequirementPreparer: # How verbose should underlying tooling be? self.verbosity = verbosity + # Are we using the legacy resolver? + self.legacy_resolver = legacy_resolver + # Memoized downloaded files, as mapping of url: path. 
self._downloaded: Dict[str, str] = {} @@ -365,6 +369,11 @@ class RequirementPreparer: self, req: InstallRequirement, ) -> Optional[BaseDistribution]: + if self.legacy_resolver: + logger.debug( + "Metadata-only fetching is not used in the legacy resolver", + ) + return None if self.require_hashes: logger.debug( "Metadata-only fetching is not used as hash checking is required", diff --git a/lib/python3.11/site-packages/pip/_internal/req/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/req/__pycache__/__init__.cpython-311.pyc index 4d03449..3010097 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/req/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/req/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/req/__pycache__/constructors.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/req/__pycache__/constructors.cpython-311.pyc index 314f784..b835bc0 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/req/__pycache__/constructors.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/req/__pycache__/constructors.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/req/__pycache__/req_file.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/req/__pycache__/req_file.cpython-311.pyc index c20ba19..1bbc6f0 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/req/__pycache__/req_file.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/req/__pycache__/req_file.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/req/__pycache__/req_install.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/req/__pycache__/req_install.cpython-311.pyc index 85b5953..ae5d477 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/req/__pycache__/req_install.cpython-311.pyc and 
b/lib/python3.11/site-packages/pip/_internal/req/__pycache__/req_install.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/req/__pycache__/req_set.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/req/__pycache__/req_set.cpython-311.pyc index 550ea0f..5d1f2ce 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/req/__pycache__/req_set.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/req/__pycache__/req_set.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-311.pyc index d289785..044ff25 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-311.pyc index 550be22..d9316de 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/resolution/__pycache__/base.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/resolution/__pycache__/base.cpython-311.pyc index 45438b3..c361c95 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/resolution/__pycache__/base.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/resolution/__pycache__/base.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-311.pyc 
index 70c4763..61406d9 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-311.pyc index c864a28..ee3e52d 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-311.pyc index 9e1cf0a..d208af5 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-311.pyc index 8bcd2d2..7104fb2 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-311.pyc index 6024587..d2c85d4 100644 Binary files 
a/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-311.pyc index ed02a89..79ed165 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-311.pyc index e2b86a5..255c78f 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-311.pyc index 213d687..bf93b59 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-311.pyc index 66415ba..e509e82 100644 Binary files 
a/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-311.pyc index f3f34b8..2115cb9 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-311.pyc index 97a2b61..93c9848 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-311.pyc index fa1a58d..9aac65f 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/_jaraco_text.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/_jaraco_text.cpython-311.pyc index 5ef4d99..66854e5 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/_jaraco_text.cpython-311.pyc and 
b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/_jaraco_text.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/_log.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/_log.cpython-311.pyc index d3e26aa..61521d9 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/_log.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/_log.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-311.pyc index aab63c7..54d8af1 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/compat.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/compat.cpython-311.pyc index 7dfe6cc..43246c7 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/compat.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/compat.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-311.pyc index 20e8d7e..51a59a6 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-311.pyc index 7d83db1..718081f 100644 
Binary files a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-311.pyc index 37281d6..ac9add8 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-311.pyc index 88889de..3ab1bc0 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-311.pyc index 79e1130..2adff05 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-311.pyc index cff3c73..da6e17b 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-311.pyc differ diff --git 
a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-311.pyc index 58a3321..d26b928 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-311.pyc index 0c1606c..13e5509 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-311.pyc index 42cba51..90657c7 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-311.pyc index af6f4f1..cc9bf3f 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-311.pyc index ac311ba..94c498a 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-311.pyc and 
b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/inject_securetransport.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/inject_securetransport.cpython-311.pyc index b029e64..13c5913 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/inject_securetransport.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/inject_securetransport.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/logging.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/logging.cpython-311.pyc index f20bafa..9c22ee2 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/logging.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/logging.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/misc.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/misc.cpython-311.pyc index 33922a5..1ee2b76 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/misc.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/misc.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/models.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/models.cpython-311.pyc index 0196979..902bb56 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/models.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/models.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-311.pyc index 
4e0c523..966b016 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-311.pyc index 474b97f..642b196 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-311.pyc index 75a0ece..0936742 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-311.pyc index 970870b..89ba264 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-311.pyc index 47fd777..aec1b8b 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-311.pyc differ diff --git 
a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/urls.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/urls.cpython-311.pyc index 770ff95..bef2293 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/urls.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/urls.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-311.pyc index 6fa3748..ef6debf 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-311.pyc index acb561a..77eaadb 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/utils/misc.py b/lib/python3.11/site-packages/pip/_internal/utils/misc.py index afcf170..bd191c4 100644 --- a/lib/python3.11/site-packages/pip/_internal/utils/misc.py +++ b/lib/python3.11/site-packages/pip/_internal/utils/misc.py @@ -1,6 +1,3 @@ -# The following comment should be removed at some point in the future. 
-# mypy: strict-optional=False - import contextlib import errno import getpass @@ -344,17 +341,18 @@ def write_output(msg: Any, *args: Any) -> None: class StreamWrapper(StringIO): - orig_stream: TextIO = None + orig_stream: TextIO @classmethod def from_stream(cls, orig_stream: TextIO) -> "StreamWrapper": - cls.orig_stream = orig_stream - return cls() + ret = cls() + ret.orig_stream = orig_stream + return ret # compileall.compile_dir() needs stdout.encoding to print to stdout - # https://github.com/python/mypy/issues/4125 + # type ignore is because TextIOBase.encoding is writeable @property - def encoding(self): # type: ignore + def encoding(self) -> str: # type: ignore return self.orig_stream.encoding @@ -422,7 +420,7 @@ def build_url_from_netloc(netloc: str, scheme: str = "https") -> str: return f"{scheme}://{netloc}" -def parse_netloc(netloc: str) -> Tuple[str, Optional[int]]: +def parse_netloc(netloc: str) -> Tuple[Optional[str], Optional[int]]: """ Return the host-port pair from a netloc. """ @@ -510,7 +508,9 @@ def _redact_netloc(netloc: str) -> Tuple[str]: return (redact_netloc(netloc),) -def split_auth_netloc_from_url(url: str) -> Tuple[str, str, Tuple[str, str]]: +def split_auth_netloc_from_url( + url: str, +) -> Tuple[str, str, Tuple[Optional[str], Optional[str]]]: """ Parse a url into separate netloc, auth, and url with no auth. 
diff --git a/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-311.pyc index 3bc53a9..08175c3 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-311.pyc index 1326fbb..670ef09 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/git.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/git.cpython-311.pyc index bcbe652..d44fb49 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/git.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/git.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-311.pyc index bd28a21..3494dd5 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-311.pyc index cc88713..22517de 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-311.pyc and 
b/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-311.pyc b/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-311.pyc index 6fb9528..247014d 100644 Binary files a/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/__pycache__/__init__.cpython-311.pyc index 6022f52..5febaf9 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/__pycache__/six.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/__pycache__/six.cpython-311.pyc index e471e9b..b48c66c 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/__pycache__/six.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/__pycache__/six.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/__pycache__/typing_extensions.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/__pycache__/typing_extensions.cpython-311.pyc index 0f6bb57..2d62df3 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/__pycache__/typing_extensions.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/__pycache__/typing_extensions.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-311.pyc index 9cd46d9..52466a7 100644 Binary files 
a/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-311.pyc index e04abd9..4027383 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-311.pyc index 26825e1..4dac374 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-311.pyc index aa4bd22..8252125 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/compat.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/compat.cpython-311.pyc index 11d2e84..ab8fa5b 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/compat.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/compat.cpython-311.pyc differ diff --git 
a/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-311.pyc index b5b48bc..b955b09 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-311.pyc index 5865705..d53c7dd 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-311.pyc index 57c0b5b..7465273 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-311.pyc index 873aa63..53deba8 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-311.pyc index 528c2ca..f3b2833 100644 
Binary files a/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-311.pyc index e25358a..e4b135d 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-311.pyc index 21b0ce0..c7b7fa5 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-311.pyc index 6bcbb25..22a5a1f 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-311.pyc index 2bd097a..f70808c 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-311.pyc and 
b/lib/python3.11/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/certifi/__pycache__/__main__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/certifi/__pycache__/__main__.cpython-311.pyc index ad13d9b..3b9aa52 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/certifi/__pycache__/__main__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/certifi/__pycache__/__main__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-311.pyc index 80da25c..2db2cbb 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-311.pyc index c8c3e3d..013a7ac 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-311.pyc index 6d063ae..7dc703f 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-311.pyc index 5e45166..e3f18ba 100644 Binary files 
a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-311.pyc index 8c1e172..63feb1d 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-311.pyc index e91717a..7f7b87e 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-311.pyc index 254c882..b412e79 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-311.pyc index 2c08f7a..4f648cc 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-311.pyc differ diff 
--git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachinedict.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachinedict.cpython-311.pyc index 4b29ec3..19f822b 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachinedict.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachinedict.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-311.pyc index e1f9c6b..fbf17eb 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-311.pyc index 696aa37..2dda4ad 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-311.pyc index 24a45fc..e531304 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-311.pyc index 24da8ac..96db7f8 100644 Binary files 
a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-311.pyc index 9fddcce..b6796f0 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-311.pyc index e0cecf2..c816d8a 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-311.pyc index e0c1fb1..9ed33fa 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-311.pyc index 91da6f8..68c2808 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-311.pyc 
b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-311.pyc index efd48e1..81030fd 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-311.pyc index a15ce7b..022112a 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-311.pyc index 4dbfe65..b55fa0c 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-311.pyc index 77fedff..4255244 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/jisfreq.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/jisfreq.cpython-311.pyc index d36e3fa..d4adeca 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/jisfreq.cpython-311.pyc and 
b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/jisfreq.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/johabfreq.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/johabfreq.cpython-311.pyc index 94fa450..ecd816d 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/johabfreq.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/johabfreq.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/johabprober.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/johabprober.cpython-311.pyc index 7bb06ce..c8ede03 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/johabprober.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/johabprober.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-311.pyc index e4282f2..5e987dd 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-311.pyc index 6d8918c..ab12d17 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-311.pyc 
b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-311.pyc index 33d03e8..aa2fe37 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-311.pyc index 770a473..809a0cd 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-311.pyc index 89f813e..86fe16e 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/langrussianmodel.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/langrussianmodel.cpython-311.pyc index bd353d9..ef4e0f8 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/langrussianmodel.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/langrussianmodel.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-311.pyc index 0408b69..3655b48 100644 Binary files 
a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-311.pyc index 27afb5f..7eccc77 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/latin1prober.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/latin1prober.cpython-311.pyc index 866b7bc..cb0d4d8 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/latin1prober.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/latin1prober.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/macromanprober.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/macromanprober.cpython-311.pyc index fc6093f..99b34fb 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/macromanprober.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/macromanprober.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-311.pyc index 9deb3c4..0f6916d 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-311.pyc differ diff --git 
a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-311.pyc index bd380a6..de23c80 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/mbcssm.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/mbcssm.cpython-311.pyc index b496a2e..71d8b4c 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/mbcssm.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/mbcssm.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/resultdict.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/resultdict.cpython-311.pyc index d39b645..0776690 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/resultdict.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/resultdict.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-311.pyc index ec81046..3918b43 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-311.pyc index f09ae4e..b8ff38d 100644 Binary files 
a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/sjisprober.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/sjisprober.cpython-311.pyc index 7bb286d..a551c0b 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/sjisprober.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/sjisprober.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/universaldetector.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/universaldetector.cpython-311.pyc index 1c20aeb..e6b7d11 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/universaldetector.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/universaldetector.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/utf1632prober.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/utf1632prober.cpython-311.pyc index 785170f..0c5cedf 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/utf1632prober.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/utf1632prober.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/utf8prober.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/utf8prober.cpython-311.pyc index 8e54a96..8bc8646 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/utf8prober.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/utf8prober.cpython-311.pyc differ diff --git 
a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/version.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/version.cpython-311.pyc index 6e7c59f..5edde6b 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/version.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/__pycache__/version.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/cli/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/cli/__pycache__/__init__.cpython-311.pyc index 04cac32..fdcdc96 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/cli/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/cli/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-311.pyc index 6b2664e..0b2e452 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/metadata/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/metadata/__pycache__/__init__.cpython-311.pyc index 599a224..923c135 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/chardet/metadata/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/metadata/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/chardet/metadata/__pycache__/languages.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/chardet/metadata/__pycache__/languages.cpython-311.pyc index 2f8d444..a3a6a71 100644 Binary files 
a/lib/python3.11/site-packages/pip/_vendor/chardet/metadata/__pycache__/languages.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/chardet/metadata/__pycache__/languages.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/colorama/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/colorama/__pycache__/__init__.cpython-311.pyc index 536ff65..22f1b45 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/colorama/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/colorama/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/colorama/__pycache__/ansi.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/colorama/__pycache__/ansi.cpython-311.pyc index 4f5c458..dad33ca 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/colorama/__pycache__/ansi.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/colorama/__pycache__/ansi.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/colorama/__pycache__/ansitowin32.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/colorama/__pycache__/ansitowin32.cpython-311.pyc index e0f2bb7..d1ea2f1 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/colorama/__pycache__/ansitowin32.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/colorama/__pycache__/ansitowin32.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/colorama/__pycache__/initialise.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/colorama/__pycache__/initialise.cpython-311.pyc index a1a02f5..3be00cd 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/colorama/__pycache__/initialise.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/colorama/__pycache__/initialise.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/colorama/__pycache__/win32.cpython-311.pyc 
b/lib/python3.11/site-packages/pip/_vendor/colorama/__pycache__/win32.cpython-311.pyc index 131ee38..4a41e3d 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/colorama/__pycache__/win32.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/colorama/__pycache__/win32.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/colorama/__pycache__/winterm.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/colorama/__pycache__/winterm.cpython-311.pyc index eb9073c..07d94d9 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/colorama/__pycache__/winterm.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/colorama/__pycache__/winterm.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/colorama/tests/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/colorama/tests/__pycache__/__init__.cpython-311.pyc index c60a7be..a2aea0a 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/colorama/tests/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/colorama/tests/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/colorama/tests/__pycache__/ansi_test.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/colorama/tests/__pycache__/ansi_test.cpython-311.pyc index 77c08c0..a402735 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/colorama/tests/__pycache__/ansi_test.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/colorama/tests/__pycache__/ansi_test.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/colorama/tests/__pycache__/ansitowin32_test.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/colorama/tests/__pycache__/ansitowin32_test.cpython-311.pyc index 9620b99..fedb827 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/colorama/tests/__pycache__/ansitowin32_test.cpython-311.pyc and 
b/lib/python3.11/site-packages/pip/_vendor/colorama/tests/__pycache__/ansitowin32_test.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/colorama/tests/__pycache__/initialise_test.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/colorama/tests/__pycache__/initialise_test.cpython-311.pyc index d5bc278..99eb975 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/colorama/tests/__pycache__/initialise_test.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/colorama/tests/__pycache__/initialise_test.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/colorama/tests/__pycache__/isatty_test.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/colorama/tests/__pycache__/isatty_test.cpython-311.pyc index 1b825dc..9b57964 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/colorama/tests/__pycache__/isatty_test.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/colorama/tests/__pycache__/isatty_test.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/colorama/tests/__pycache__/utils.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/colorama/tests/__pycache__/utils.cpython-311.pyc index 3fdd81a..7d6470b 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/colorama/tests/__pycache__/utils.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/colorama/tests/__pycache__/utils.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/colorama/tests/__pycache__/winterm_test.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/colorama/tests/__pycache__/winterm_test.cpython-311.pyc index b40a3c3..06b268f 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/colorama/tests/__pycache__/winterm_test.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/colorama/tests/__pycache__/winterm_test.cpython-311.pyc differ diff --git 
a/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-311.pyc index 4866f7e..7bd459f 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-311.pyc index e0f0de4..7b357ff 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/database.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/database.cpython-311.pyc index 98ce5fb..b3a9a76 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/database.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/database.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/index.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/index.cpython-311.pyc index cd40407..f5acc91 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/index.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/index.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/locators.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/locators.cpython-311.pyc index d899947..51dcd5d 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/locators.cpython-311.pyc and 
b/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/locators.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/manifest.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/manifest.cpython-311.pyc index 0bc658d..83261c6 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/manifest.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/manifest.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/markers.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/markers.cpython-311.pyc index 25bdffa..61c9392 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/markers.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/markers.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/metadata.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/metadata.cpython-311.pyc index 6cadc1b..b68213f 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/metadata.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/metadata.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-311.pyc index 7b0fa15..3f040da 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-311.pyc index cefdea8..6c33c6e 100644 Binary files 
a/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-311.pyc index c716a6a..b91eec9 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/version.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/version.cpython-311.pyc index 69d9170..7ae60e6 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/version.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/version.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/wheel.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/wheel.cpython-311.pyc index c9d7381..7bf82fa 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/wheel.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/wheel.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/distro/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/distro/__pycache__/__init__.cpython-311.pyc index 520a1ee..117f74b 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/distro/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/distro/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/distro/__pycache__/__main__.cpython-311.pyc 
b/lib/python3.11/site-packages/pip/_vendor/distro/__pycache__/__main__.cpython-311.pyc index b09f34a..a9f6a9b 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/distro/__pycache__/__main__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/distro/__pycache__/__main__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/distro/__pycache__/distro.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/distro/__pycache__/distro.cpython-311.pyc index eb93f15..ee274d4 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/distro/__pycache__/distro.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/distro/__pycache__/distro.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/idna/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/idna/__pycache__/__init__.cpython-311.pyc index 10c7b18..21a0cac 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/idna/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/idna/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/idna/__pycache__/codec.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/idna/__pycache__/codec.cpython-311.pyc index 3a96118..4d3e521 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/idna/__pycache__/codec.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/idna/__pycache__/codec.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/idna/__pycache__/compat.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/idna/__pycache__/compat.cpython-311.pyc index 740b904..d30cf77 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/idna/__pycache__/compat.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/idna/__pycache__/compat.cpython-311.pyc differ diff --git 
a/lib/python3.11/site-packages/pip/_vendor/idna/__pycache__/core.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/idna/__pycache__/core.cpython-311.pyc index b684c41..fb35ebe 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/idna/__pycache__/core.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/idna/__pycache__/core.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/idna/__pycache__/idnadata.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/idna/__pycache__/idnadata.cpython-311.pyc index 22aa3fe..2d470a9 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/idna/__pycache__/idnadata.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/idna/__pycache__/idnadata.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/idna/__pycache__/intranges.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/idna/__pycache__/intranges.cpython-311.pyc index f131f9d..5e88f1f 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/idna/__pycache__/intranges.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/idna/__pycache__/intranges.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/idna/__pycache__/package_data.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/idna/__pycache__/package_data.cpython-311.pyc index c3ac5fc..dea07fb 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/idna/__pycache__/package_data.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/idna/__pycache__/package_data.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/idna/__pycache__/uts46data.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/idna/__pycache__/uts46data.cpython-311.pyc index 2a0ae90..feb9d4c 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/idna/__pycache__/uts46data.cpython-311.pyc and 
b/lib/python3.11/site-packages/pip/_vendor/idna/__pycache__/uts46data.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/msgpack/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/msgpack/__pycache__/__init__.cpython-311.pyc index d590299..278fa23 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/msgpack/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/msgpack/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/msgpack/__pycache__/exceptions.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/msgpack/__pycache__/exceptions.cpython-311.pyc index 7a0c7c9..d47bb37 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/msgpack/__pycache__/exceptions.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/msgpack/__pycache__/exceptions.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/msgpack/__pycache__/ext.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/msgpack/__pycache__/ext.cpython-311.pyc index 0c1a62d..41b64ed 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/msgpack/__pycache__/ext.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/msgpack/__pycache__/ext.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/msgpack/__pycache__/fallback.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/msgpack/__pycache__/fallback.cpython-311.pyc index c0f169e..3924465 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/msgpack/__pycache__/fallback.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/msgpack/__pycache__/fallback.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/__about__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/__about__.cpython-311.pyc index 743b1a1..c1cbb70 100644 Binary files 
a/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/__about__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/__about__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/__init__.cpython-311.pyc index 40ebe81..c686d00 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/_manylinux.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/_manylinux.cpython-311.pyc index 5fbf379..45d977e 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/_manylinux.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/_manylinux.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/_musllinux.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/_musllinux.cpython-311.pyc index bdd528d..ba2d00f 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/_musllinux.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/_musllinux.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/_structures.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/_structures.cpython-311.pyc index a94727e..698f61f 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/_structures.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/_structures.cpython-311.pyc differ diff --git 
a/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/markers.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/markers.cpython-311.pyc index 3c9cfdd..328bbd7 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/markers.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/markers.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/requirements.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/requirements.cpython-311.pyc index e88a97e..6599ea9 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/requirements.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/requirements.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/specifiers.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/specifiers.cpython-311.pyc index 67cc2b1..d470c73 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/specifiers.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/specifiers.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/tags.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/tags.cpython-311.pyc index df54cc1..e5d1210 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/tags.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/tags.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/utils.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/utils.cpython-311.pyc index ded85e3..fb5a21c 100644 Binary files 
a/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/utils.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/utils.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/version.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/version.cpython-311.pyc index b16138a..a26e655 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/version.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/version.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-311.pyc index e5fa5f8..d7ce82d 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/platformdirs/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/platformdirs/__pycache__/__init__.cpython-311.pyc index 0583135..af99b3a 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/platformdirs/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/platformdirs/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/platformdirs/__pycache__/__main__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/platformdirs/__pycache__/__main__.cpython-311.pyc index ded62bd..79f2b65 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/platformdirs/__pycache__/__main__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/platformdirs/__pycache__/__main__.cpython-311.pyc differ diff --git 
a/lib/python3.11/site-packages/pip/_vendor/platformdirs/__pycache__/android.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/platformdirs/__pycache__/android.cpython-311.pyc index b64bfc9..e41a35d 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/platformdirs/__pycache__/android.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/platformdirs/__pycache__/android.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/platformdirs/__pycache__/api.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/platformdirs/__pycache__/api.cpython-311.pyc index 838b0ae..15a1cab 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/platformdirs/__pycache__/api.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/platformdirs/__pycache__/api.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/platformdirs/__pycache__/macos.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/platformdirs/__pycache__/macos.cpython-311.pyc index 637e1c5..c7d2e96 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/platformdirs/__pycache__/macos.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/platformdirs/__pycache__/macos.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/platformdirs/__pycache__/unix.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/platformdirs/__pycache__/unix.cpython-311.pyc index c89a016..7aaafce 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/platformdirs/__pycache__/unix.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/platformdirs/__pycache__/unix.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/platformdirs/__pycache__/version.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/platformdirs/__pycache__/version.cpython-311.pyc index 585466b..3a68b46 100644 Binary files 
a/lib/python3.11/site-packages/pip/_vendor/platformdirs/__pycache__/version.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/platformdirs/__pycache__/version.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/platformdirs/__pycache__/windows.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/platformdirs/__pycache__/windows.cpython-311.pyc index 80e1580..ea80f38 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/platformdirs/__pycache__/windows.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/platformdirs/__pycache__/windows.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/__init__.cpython-311.pyc index cf5fa04..28a3e96 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/__main__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/__main__.cpython-311.pyc index abb03ac..9a3830c 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/__main__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/__main__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/cmdline.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/cmdline.cpython-311.pyc index e55761a..c361519 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/cmdline.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/cmdline.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/console.cpython-311.pyc 
b/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/console.cpython-311.pyc index e0757ad..69327a3 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/console.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/console.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/filter.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/filter.cpython-311.pyc index 884d78a..267bbdc 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/filter.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/filter.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/formatter.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/formatter.cpython-311.pyc index 817e18f..5d11fcf 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/formatter.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/formatter.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/lexer.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/lexer.cpython-311.pyc index 66163ec..9f9c9dd 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/lexer.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/lexer.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/modeline.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/modeline.cpython-311.pyc index 4c2913d..ea283de 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/modeline.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/modeline.cpython-311.pyc differ diff --git 
a/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/plugin.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/plugin.cpython-311.pyc index b1202d2..3292d27 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/plugin.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/plugin.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/regexopt.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/regexopt.cpython-311.pyc index 740a724..e6e3c8f 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/regexopt.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/regexopt.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/scanner.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/scanner.cpython-311.pyc index c742901..5199185 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/scanner.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/scanner.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/sphinxext.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/sphinxext.cpython-311.pyc index 54e9531..28184d0 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/sphinxext.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/sphinxext.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/style.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/style.cpython-311.pyc index 8f1623a..ecf3abb 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/style.cpython-311.pyc and 
b/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/style.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/token.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/token.cpython-311.pyc index 495a53c..5438891 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/token.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/token.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/unistring.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/unistring.cpython-311.pyc index bd01c73..f16ff13 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/unistring.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/unistring.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/util.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/util.cpython-311.pyc index 40532de..7e001c0 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/util.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pygments/__pycache__/util.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pygments/filters/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pygments/filters/__pycache__/__init__.cpython-311.pyc index f71a178..415aad9 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pygments/filters/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pygments/filters/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/__init__.cpython-311.pyc 
index 948afd6..9d913e9 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/_mapping.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/_mapping.cpython-311.pyc index bc0acd5..e00a055 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/_mapping.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/_mapping.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/bbcode.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/bbcode.cpython-311.pyc index 1b0af1b..1e5cd26 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/bbcode.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/bbcode.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/groff.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/groff.cpython-311.pyc index a013b18..c2b6877 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/groff.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/groff.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/html.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/html.cpython-311.pyc index 91b4a36..143b400 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/html.cpython-311.pyc and 
b/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/html.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/img.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/img.cpython-311.pyc index 8d370b1..df0c1e4 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/img.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/img.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/irc.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/irc.cpython-311.pyc index fd48dc9..28af947 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/irc.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/irc.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/latex.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/latex.cpython-311.pyc index 17b9139..e9ba256 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/latex.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/latex.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/other.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/other.cpython-311.pyc index 608251b..8f7a217 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/other.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/other.cpython-311.pyc differ diff --git 
a/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/pangomarkup.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/pangomarkup.cpython-311.pyc index b217f5a..21abf8b 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/pangomarkup.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/pangomarkup.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/rtf.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/rtf.cpython-311.pyc index bd946b8..7649cf0 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/rtf.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/rtf.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/svg.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/svg.cpython-311.pyc index 7307209..efae0a5 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/svg.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/svg.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/terminal.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/terminal.cpython-311.pyc index f2c7c37..0740812 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/terminal.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/terminal.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/terminal256.cpython-311.pyc 
b/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/terminal256.cpython-311.pyc index d26eab9..57884c0 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/terminal256.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__pycache__/terminal256.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pygments/lexers/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pygments/lexers/__pycache__/__init__.cpython-311.pyc index 40edd7a..6bcbef6 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pygments/lexers/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pygments/lexers/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pygments/lexers/__pycache__/_mapping.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pygments/lexers/__pycache__/_mapping.cpython-311.pyc index 4a16eb3..33ad5ab 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pygments/lexers/__pycache__/_mapping.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pygments/lexers/__pycache__/_mapping.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pygments/lexers/__pycache__/python.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pygments/lexers/__pycache__/python.cpython-311.pyc index 95dfd10..3f8d526 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pygments/lexers/__pycache__/python.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pygments/lexers/__pycache__/python.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pygments/styles/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pygments/styles/__pycache__/__init__.cpython-311.pyc index e9c4f91..59e1b6b 100644 Binary files 
a/lib/python3.11/site-packages/pip/_vendor/pygments/styles/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pygments/styles/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/__init__.cpython-311.pyc index a0d6300..e5b3426 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/actions.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/actions.cpython-311.pyc index 2191f71..41b6226 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/actions.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/actions.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/common.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/common.cpython-311.pyc index 7acff23..2f3fc3a 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/common.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/common.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/core.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/core.cpython-311.pyc index 93b1158..a1e87b6 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/core.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/core.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/exceptions.cpython-311.pyc 
b/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/exceptions.cpython-311.pyc index 40baf59..d752300 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/exceptions.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/exceptions.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/helpers.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/helpers.cpython-311.pyc index 9ac3c7b..0687186 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/helpers.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/helpers.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/results.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/results.cpython-311.pyc index 607bcad..74c3f58 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/results.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/results.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/testing.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/testing.cpython-311.pyc index 9aaf744..8c5d1c2 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/testing.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/testing.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/unicode.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/unicode.cpython-311.pyc index 987c859..fbfee2a 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/unicode.cpython-311.pyc and 
b/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/unicode.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/util.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/util.cpython-311.pyc index a6bd7e0..3c73949 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/util.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/util.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pyparsing/diagram/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pyparsing/diagram/__pycache__/__init__.cpython-311.pyc index e66c1a6..16934da 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pyparsing/diagram/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pyparsing/diagram/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pyproject_hooks/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pyproject_hooks/__pycache__/__init__.cpython-311.pyc index 9c58e9c..a7900b4 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pyproject_hooks/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pyproject_hooks/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pyproject_hooks/__pycache__/_compat.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pyproject_hooks/__pycache__/_compat.cpython-311.pyc index 3a089d2..7aa29ea 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pyproject_hooks/__pycache__/_compat.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pyproject_hooks/__pycache__/_compat.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pyproject_hooks/__pycache__/_impl.cpython-311.pyc 
b/lib/python3.11/site-packages/pip/_vendor/pyproject_hooks/__pycache__/_impl.cpython-311.pyc index e7418d7..cbcf18b 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pyproject_hooks/__pycache__/_impl.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pyproject_hooks/__pycache__/_impl.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pyproject_hooks/_in_process/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pyproject_hooks/_in_process/__pycache__/__init__.cpython-311.pyc index 8190eb7..4f461e3 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pyproject_hooks/_in_process/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pyproject_hooks/_in_process/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/pyproject_hooks/_in_process/__pycache__/_in_process.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/pyproject_hooks/_in_process/__pycache__/_in_process.cpython-311.pyc index 9288661..1ebdda9 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/pyproject_hooks/_in_process/__pycache__/_in_process.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/pyproject_hooks/_in_process/__pycache__/_in_process.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/__init__.cpython-311.pyc index d06ebb6..d33a66b 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/__version__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/__version__.cpython-311.pyc index b1b5990..7541b7b 100644 Binary files 
a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/__version__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/__version__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/_internal_utils.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/_internal_utils.cpython-311.pyc index d388fc8..f935b83 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/_internal_utils.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/_internal_utils.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/adapters.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/adapters.cpython-311.pyc index 808c42b..fac6462 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/adapters.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/adapters.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/api.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/api.cpython-311.pyc index b23c3d1..7dae0c7 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/api.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/api.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/auth.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/auth.cpython-311.pyc index 0f5002a..54fc81c 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/auth.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/auth.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/certs.cpython-311.pyc 
b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/certs.cpython-311.pyc index 1876031..88cb8e0 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/certs.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/certs.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/compat.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/compat.cpython-311.pyc index e91438d..0c468e0 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/compat.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/compat.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/cookies.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/cookies.cpython-311.pyc index 390aa44..2983a97 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/cookies.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/cookies.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/exceptions.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/exceptions.cpython-311.pyc index ef38357..409f7d0 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/exceptions.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/exceptions.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/help.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/help.cpython-311.pyc index 95756d8..3132dd5 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/help.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/help.cpython-311.pyc differ diff --git 
a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/hooks.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/hooks.cpython-311.pyc index 7661d74..bb82f2f 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/hooks.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/hooks.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/models.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/models.cpython-311.pyc index f4043c0..b8eea21 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/models.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/models.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/packages.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/packages.cpython-311.pyc index 01e5cf0..f9b9bba 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/packages.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/packages.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/sessions.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/sessions.cpython-311.pyc index d96499a..38d1177 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/sessions.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/sessions.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/status_codes.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/status_codes.cpython-311.pyc index 0dfe6db..7a62b29 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/status_codes.cpython-311.pyc and 
b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/status_codes.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/structures.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/structures.cpython-311.pyc index b384feb..2fe442d 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/structures.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/structures.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/utils.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/utils.cpython-311.pyc index c5e3eb9..7d822b8 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/utils.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/requests/__pycache__/utils.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/resolvelib/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/resolvelib/__pycache__/__init__.cpython-311.pyc index 70b3c7f..fd8057d 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/resolvelib/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/resolvelib/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/resolvelib/__pycache__/providers.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/resolvelib/__pycache__/providers.cpython-311.pyc index 0670e59..b95fe99 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/resolvelib/__pycache__/providers.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/resolvelib/__pycache__/providers.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/resolvelib/__pycache__/reporters.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/resolvelib/__pycache__/reporters.cpython-311.pyc 
index 675051f..b00d1f7 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/resolvelib/__pycache__/reporters.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/resolvelib/__pycache__/reporters.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/resolvelib/__pycache__/resolvers.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/resolvelib/__pycache__/resolvers.cpython-311.pyc index bb4a3d4..f751c3c 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/resolvelib/__pycache__/resolvers.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/resolvelib/__pycache__/resolvers.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/resolvelib/__pycache__/structs.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/resolvelib/__pycache__/structs.cpython-311.pyc index dc6c4ab..a2534d3 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/resolvelib/__pycache__/structs.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/resolvelib/__pycache__/structs.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/resolvelib/compat/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/resolvelib/compat/__pycache__/__init__.cpython-311.pyc index a27733c..eba5f63 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/resolvelib/compat/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/resolvelib/compat/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/resolvelib/compat/__pycache__/collections_abc.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/resolvelib/compat/__pycache__/collections_abc.cpython-311.pyc index 4d7ffc0..e351c54 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/resolvelib/compat/__pycache__/collections_abc.cpython-311.pyc and 
b/lib/python3.11/site-packages/pip/_vendor/resolvelib/compat/__pycache__/collections_abc.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/__init__.cpython-311.pyc index 7255fbd..1b35306 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/__main__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/__main__.cpython-311.pyc index a43d289..e66eac3 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/__main__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/__main__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_cell_widths.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_cell_widths.cpython-311.pyc index 64ea013..3ad4ad6 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_cell_widths.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_cell_widths.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_emoji_codes.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_emoji_codes.cpython-311.pyc index 52de723..12bcbef 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_emoji_codes.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_emoji_codes.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_emoji_replace.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_emoji_replace.cpython-311.pyc index e217386..84c9ece 100644 Binary files 
a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_emoji_replace.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_emoji_replace.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_export_format.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_export_format.cpython-311.pyc index ae9b94a..4fbe641 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_export_format.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_export_format.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_extension.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_extension.cpython-311.pyc index 0bfb632..5bebb34 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_extension.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_extension.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_fileno.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_fileno.cpython-311.pyc index 63595ea..53cee77 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_fileno.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_fileno.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_inspect.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_inspect.cpython-311.pyc index 7b6902b..373ee55 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_inspect.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_inspect.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_log_render.cpython-311.pyc 
b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_log_render.cpython-311.pyc index 655c015..9282d19 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_log_render.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_log_render.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_loop.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_loop.cpython-311.pyc index 6eb5cf5..2e5818e 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_loop.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_loop.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_null_file.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_null_file.cpython-311.pyc index 4cdc637..a58f0a5 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_null_file.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_null_file.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_palettes.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_palettes.cpython-311.pyc index 285f873..df1a293 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_palettes.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_palettes.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_pick.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_pick.cpython-311.pyc index 1262b61..7604c8d 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_pick.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_pick.cpython-311.pyc differ diff --git 
a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_ratio.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_ratio.cpython-311.pyc index 8bb6cc9..5280079 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_ratio.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_ratio.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_spinners.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_spinners.cpython-311.pyc index f2ee3e0..c983e7d 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_spinners.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_spinners.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_stack.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_stack.cpython-311.pyc index b5556ca..cb4f584 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_stack.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_stack.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_timer.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_timer.cpython-311.pyc index b35fd9c..e0c948c 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_timer.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_timer.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_win32_console.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_win32_console.cpython-311.pyc index 4e2a5d5..efbb6d4 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_win32_console.cpython-311.pyc and 
b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_win32_console.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_windows.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_windows.cpython-311.pyc index dd0cd72..ed6943d 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_windows.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_windows.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_windows_renderer.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_windows_renderer.cpython-311.pyc index e50ced5..6bed992 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_windows_renderer.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_windows_renderer.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_wrap.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_wrap.cpython-311.pyc index 2eac983..5ddd71b 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_wrap.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/_wrap.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/abc.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/abc.cpython-311.pyc index dd4fb9b..e123804 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/abc.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/abc.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/align.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/align.cpython-311.pyc index 503b543..aa23956 100644 Binary files 
a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/align.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/align.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/ansi.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/ansi.cpython-311.pyc index c0ac2ba..9a5d2f5 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/ansi.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/ansi.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/bar.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/bar.cpython-311.pyc index 62df538..1e5a6c9 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/bar.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/bar.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/box.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/box.cpython-311.pyc index 030649a..44b6e84 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/box.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/box.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/cells.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/cells.cpython-311.pyc index d422fec..374815f 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/cells.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/cells.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/color.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/color.cpython-311.pyc index d60ffaa..2603aae 100644 Binary files 
a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/color.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/color.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/color_triplet.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/color_triplet.cpython-311.pyc index 49f32ed..b49f9d5 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/color_triplet.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/color_triplet.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/columns.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/columns.cpython-311.pyc index 82cd01c..dae04ac 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/columns.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/columns.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/console.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/console.cpython-311.pyc index dc62538..b85d2cd 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/console.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/console.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/constrain.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/constrain.cpython-311.pyc index 51e832d..ec0e2bb 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/constrain.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/constrain.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/containers.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/containers.cpython-311.pyc 
index d5780dd..e01b2af 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/containers.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/containers.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/control.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/control.cpython-311.pyc index 975eadf..13dbb4c 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/control.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/control.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/default_styles.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/default_styles.cpython-311.pyc index a6d18f3..0065126 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/default_styles.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/default_styles.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/diagnose.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/diagnose.cpython-311.pyc index 8d31031..b78a028 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/diagnose.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/diagnose.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/emoji.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/emoji.cpython-311.pyc index e0d2918..721fee2 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/emoji.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/emoji.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/errors.cpython-311.pyc 
b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/errors.cpython-311.pyc index 1b782e5..f05302a 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/errors.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/errors.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/file_proxy.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/file_proxy.cpython-311.pyc index 787d280..02ff2c7 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/file_proxy.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/file_proxy.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/filesize.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/filesize.cpython-311.pyc index 74e06e2..a56d35a 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/filesize.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/filesize.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/highlighter.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/highlighter.cpython-311.pyc index 97658c8..46870d1 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/highlighter.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/highlighter.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/json.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/json.cpython-311.pyc index 6b6b8e6..0aaa264 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/json.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/json.cpython-311.pyc differ diff --git 
a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/jupyter.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/jupyter.cpython-311.pyc index 61a5f3c..a379364 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/jupyter.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/jupyter.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/layout.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/layout.cpython-311.pyc index 0cd9c45..fe436d3 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/layout.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/layout.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/live.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/live.cpython-311.pyc index fd6b956..c2da438 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/live.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/live.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/live_render.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/live_render.cpython-311.pyc index 407063e..f2b94da 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/live_render.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/live_render.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/logging.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/logging.cpython-311.pyc index 6ac2572..d08af8d 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/logging.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/logging.cpython-311.pyc differ diff 
--git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/markup.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/markup.cpython-311.pyc index 44d9c78..32d8742 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/markup.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/markup.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/measure.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/measure.cpython-311.pyc index c4da0d0..88f5fa5 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/measure.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/measure.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/padding.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/padding.cpython-311.pyc index a9a08aa..9078025 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/padding.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/padding.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/pager.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/pager.cpython-311.pyc index 1d291ff..cca118b 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/pager.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/pager.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/palette.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/palette.cpython-311.pyc index 3c372f9..62477d2 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/palette.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/palette.cpython-311.pyc differ diff --git 
a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/panel.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/panel.cpython-311.pyc index 7c0ac8b..fa2d2c6 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/panel.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/panel.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/pretty.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/pretty.cpython-311.pyc index 4c1ec34..d413337 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/pretty.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/pretty.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/progress.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/progress.cpython-311.pyc index 26f538d..7eca135 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/progress.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/progress.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/progress_bar.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/progress_bar.cpython-311.pyc index 6717d9c..e698688 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/progress_bar.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/progress_bar.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/prompt.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/prompt.cpython-311.pyc index 529aa13..82fd486 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/prompt.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/prompt.cpython-311.pyc 
differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/protocol.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/protocol.cpython-311.pyc index 297c148..b7605a8 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/protocol.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/protocol.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/region.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/region.cpython-311.pyc index 49b9a1f..656bfa6 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/region.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/region.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/repr.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/repr.cpython-311.pyc index f8a372a..44462c3 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/repr.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/repr.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/rule.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/rule.cpython-311.pyc index ddda3de..d36463d 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/rule.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/rule.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/scope.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/scope.cpython-311.pyc index 5b0faeb..a5d7886 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/scope.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/scope.cpython-311.pyc differ diff --git 
a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/screen.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/screen.cpython-311.pyc index 9ecbe50..767731d 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/screen.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/screen.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/segment.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/segment.cpython-311.pyc index f9fa03c..e4bfe39 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/segment.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/segment.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/spinner.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/spinner.cpython-311.pyc index b7bf2b1..9e76d7a 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/spinner.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/spinner.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/status.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/status.cpython-311.pyc index 497fffd..7318945 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/status.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/status.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/style.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/style.cpython-311.pyc index 72a0336..44dc4ba 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/style.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/style.cpython-311.pyc differ diff --git 
a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/styled.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/styled.cpython-311.pyc index aad4019..ea33fd3 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/styled.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/styled.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/syntax.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/syntax.cpython-311.pyc index 2c963ab..d71360b 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/syntax.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/syntax.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/table.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/table.cpython-311.pyc index 6c8c8c5..e18a86f 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/table.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/table.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/terminal_theme.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/terminal_theme.cpython-311.pyc index c7ef6cc..a148b8b 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/terminal_theme.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/terminal_theme.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/text.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/text.cpython-311.pyc index 783e1d4..23f57c4 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/text.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/text.cpython-311.pyc differ diff 
--git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/theme.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/theme.cpython-311.pyc index 3e05c3e..6e68c78 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/theme.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/theme.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/themes.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/themes.cpython-311.pyc index 559604a..8f78cee 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/themes.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/themes.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/traceback.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/traceback.cpython-311.pyc index 6061b90..177b26d 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/traceback.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/traceback.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/tree.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/tree.cpython-311.pyc index 3655b3f..7af1e73 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/tree.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/rich/__pycache__/tree.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/__init__.cpython-311.pyc index f33489a..ef77b52 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/__init__.cpython-311.pyc 
differ diff --git a/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/_asyncio.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/_asyncio.cpython-311.pyc index bbcdad3..2c71031 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/_asyncio.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/_asyncio.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/_utils.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/_utils.cpython-311.pyc index 67ef15c..1596701 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/_utils.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/_utils.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/after.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/after.cpython-311.pyc index a3290cc..c3f5e55 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/after.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/after.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/before.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/before.cpython-311.pyc index 07c233e..6c8ff8f 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/before.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/before.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/before_sleep.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/before_sleep.cpython-311.pyc index cdf70da..415ddfb 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/before_sleep.cpython-311.pyc and 
b/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/before_sleep.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/nap.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/nap.cpython-311.pyc index 6c456e0..c7d04a1 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/nap.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/nap.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/retry.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/retry.cpython-311.pyc index 35e0ba0..d575780 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/retry.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/retry.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/stop.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/stop.cpython-311.pyc index 10e83af..43b91ec 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/stop.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/stop.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/tornadoweb.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/tornadoweb.cpython-311.pyc index 993a6fc..54895fb 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/tornadoweb.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/tornadoweb.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/wait.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/wait.cpython-311.pyc index 26b641a..ed44860 100644 Binary files 
a/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/wait.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/wait.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/tomli/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/tomli/__pycache__/__init__.cpython-311.pyc index 2881e10..ec870ca 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/tomli/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/tomli/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/tomli/__pycache__/_parser.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/tomli/__pycache__/_parser.cpython-311.pyc index 666521a..64d167d 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/tomli/__pycache__/_parser.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/tomli/__pycache__/_parser.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/tomli/__pycache__/_re.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/tomli/__pycache__/_re.cpython-311.pyc index 1efe94b..45e58d0 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/tomli/__pycache__/_re.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/tomli/__pycache__/_re.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/tomli/__pycache__/_types.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/tomli/__pycache__/_types.cpython-311.pyc index 5009543..0b45f4c 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/tomli/__pycache__/_types.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/tomli/__pycache__/_types.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/__init__.cpython-311.pyc index b0b4ea1..364d2ce 
100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/_collections.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/_collections.cpython-311.pyc index 88564f4..8555f9f 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/_collections.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/_collections.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/_version.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/_version.cpython-311.pyc index 48388ab..c9b740a 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/_version.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/_version.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/connection.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/connection.cpython-311.pyc index b9c51aa..abd80c0 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/connection.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/connection.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/connectionpool.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/connectionpool.cpython-311.pyc index a93e813..0d9eb77 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/connectionpool.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/connectionpool.cpython-311.pyc differ diff --git 
a/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/exceptions.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/exceptions.cpython-311.pyc index e91e7d5..9aa5a11 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/exceptions.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/exceptions.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/fields.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/fields.cpython-311.pyc index 0847047..c8a9a8b 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/fields.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/fields.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/filepost.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/filepost.cpython-311.pyc index 6597b09..0402740 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/filepost.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/filepost.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/poolmanager.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/poolmanager.cpython-311.pyc index da3598a..9c93670 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/poolmanager.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/poolmanager.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/request.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/request.cpython-311.pyc index 5816bde..1cb61b2 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/request.cpython-311.pyc and 
b/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/request.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/response.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/response.cpython-311.pyc index 006c2a1..bd1396a 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/response.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/__pycache__/response.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-311.pyc index 06cea54..3a22c03 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-311.pyc index e4852a3..4fb522c 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-311.pyc index 53e3083..479f259 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-311.pyc differ diff --git 
a/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-311.pyc index 0c2bdb0..4681742 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-311.pyc index 8f70c88..09b065e 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-311.pyc index 4b9d2c1..f7fb4e6 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-311.pyc index 965399d..a63ebdf 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-311.pyc 
b/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-311.pyc index bd0c2db..19b1d16 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-311.pyc index 817fbca..bb1da8c 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-311.pyc index 33b552b..63ae4ab 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-311.pyc index fc037b2..a56b70d 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/packages/__pycache__/six.cpython-311.pyc 
b/lib/python3.11/site-packages/pip/_vendor/urllib3/packages/__pycache__/six.cpython-311.pyc index 41391ea..549421e 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/packages/__pycache__/six.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/packages/__pycache__/six.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-311.pyc index 1a4f7ba..df837a3 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-311.pyc index 40373f9..194017a 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/weakref_finalize.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/weakref_finalize.cpython-311.pyc index fab68cb..f45b62c 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/weakref_finalize.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/weakref_finalize.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/__init__.cpython-311.pyc 
b/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/__init__.cpython-311.pyc index 1020cb1..e8794fd 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/__init__.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/connection.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/connection.cpython-311.pyc index 19b88ca..4f9da59 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/connection.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/connection.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/proxy.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/proxy.cpython-311.pyc index a89a317..97a04cd 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/proxy.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/proxy.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/queue.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/queue.cpython-311.pyc index 7b9a0bf..75ce9ed 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/queue.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/queue.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/request.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/request.cpython-311.pyc index 99434ca..e4a92a5 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/request.cpython-311.pyc and 
b/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/request.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/response.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/response.cpython-311.pyc index cca64e0..a53449a 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/response.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/response.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/retry.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/retry.cpython-311.pyc index e01568c..4acb146 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/retry.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/retry.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-311.pyc index 5d224a0..be898ca 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_match_hostname.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_match_hostname.cpython-311.pyc index 52452e7..2d8b482 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_match_hostname.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_match_hostname.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/ssltransport.cpython-311.pyc 
b/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/ssltransport.cpython-311.pyc index 47400ae..263bdd7 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/ssltransport.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/ssltransport.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/timeout.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/timeout.cpython-311.pyc index 8160624..0ae20a0 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/timeout.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/timeout.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/url.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/url.cpython-311.pyc index 34301c1..f02df1f 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/url.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/url.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/wait.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/wait.cpython-311.pyc index f2ac21e..3619d48 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/wait.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/urllib3/util/__pycache__/wait.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/webencodings/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/webencodings/__pycache__/__init__.cpython-311.pyc index e02b988..6ba28b9 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/webencodings/__pycache__/__init__.cpython-311.pyc and 
b/lib/python3.11/site-packages/pip/_vendor/webencodings/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/webencodings/__pycache__/labels.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/webencodings/__pycache__/labels.cpython-311.pyc index 16d811b..b0b311f 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/webencodings/__pycache__/labels.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/webencodings/__pycache__/labels.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/webencodings/__pycache__/mklabels.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/webencodings/__pycache__/mklabels.cpython-311.pyc index 163b64b..b87c0ab 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/webencodings/__pycache__/mklabels.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/webencodings/__pycache__/mklabels.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/webencodings/__pycache__/tests.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/webencodings/__pycache__/tests.cpython-311.pyc index e3c1516..8b0df7f 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/webencodings/__pycache__/tests.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/webencodings/__pycache__/tests.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/pip/_vendor/webencodings/__pycache__/x_user_defined.cpython-311.pyc b/lib/python3.11/site-packages/pip/_vendor/webencodings/__pycache__/x_user_defined.cpython-311.pyc index d72f003..f08df3c 100644 Binary files a/lib/python3.11/site-packages/pip/_vendor/webencodings/__pycache__/x_user_defined.cpython-311.pyc and b/lib/python3.11/site-packages/pip/_vendor/webencodings/__pycache__/x_user_defined.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg-3.1.9.dist-info/INSTALLER b/lib/python3.11/site-packages/psycopg-3.1.9.dist-info/INSTALLER new 
file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/lib/python3.11/site-packages/psycopg-3.1.9.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/lib/python3.11/site-packages/psycopg-3.1.9.dist-info/LICENSE.txt b/lib/python3.11/site-packages/psycopg-3.1.9.dist-info/LICENSE.txt new file mode 100644 index 0000000..0a04128 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg-3.1.9.dist-info/LICENSE.txt @@ -0,0 +1,165 @@ + GNU LESSER GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + + This version of the GNU Lesser General Public License incorporates +the terms and conditions of version 3 of the GNU General Public +License, supplemented by the additional permissions listed below. + + 0. Additional Definitions. + + As used herein, "this License" refers to version 3 of the GNU Lesser +General Public License, and the "GNU GPL" refers to version 3 of the GNU +General Public License. + + "The Library" refers to a covered work governed by this License, +other than an Application or a Combined Work as defined below. + + An "Application" is any work that makes use of an interface provided +by the Library, but which is not otherwise based on the Library. +Defining a subclass of a class defined by the Library is deemed a mode +of using an interface provided by the Library. + + A "Combined Work" is a work produced by combining or linking an +Application with the Library. The particular version of the Library +with which the Combined Work was made is also called the "Linked +Version". + + The "Minimal Corresponding Source" for a Combined Work means the +Corresponding Source for the Combined Work, excluding any source code +for portions of the Combined Work that, considered in isolation, are +based on the Application, and not on the Linked Version. 
+ + The "Corresponding Application Code" for a Combined Work means the +object code and/or source code for the Application, including any data +and utility programs needed for reproducing the Combined Work from the +Application, but excluding the System Libraries of the Combined Work. + + 1. Exception to Section 3 of the GNU GPL. + + You may convey a covered work under sections 3 and 4 of this License +without being bound by section 3 of the GNU GPL. + + 2. Conveying Modified Versions. + + If you modify a copy of the Library, and, in your modifications, a +facility refers to a function or data to be supplied by an Application +that uses the facility (other than as an argument passed when the +facility is invoked), then you may convey a copy of the modified +version: + + a) under this License, provided that you make a good faith effort to + ensure that, in the event an Application does not supply the + function or data, the facility still operates, and performs + whatever part of its purpose remains meaningful, or + + b) under the GNU GPL, with none of the additional permissions of + this License applicable to that copy. + + 3. Object Code Incorporating Material from Library Header Files. + + The object code form of an Application may incorporate material from +a header file that is part of the Library. You may convey such object +code under terms of your choice, provided that, if the incorporated +material is not limited to numerical parameters, data structure +layouts and accessors, or small macros, inline functions and templates +(ten or fewer lines in length), you do both of the following: + + a) Give prominent notice with each copy of the object code that the + Library is used in it and that the Library and its use are + covered by this License. + + b) Accompany the object code with a copy of the GNU GPL and this license + document. + + 4. Combined Works. 
+ + You may convey a Combined Work under terms of your choice that, +taken together, effectively do not restrict modification of the +portions of the Library contained in the Combined Work and reverse +engineering for debugging such modifications, if you also do each of +the following: + + a) Give prominent notice with each copy of the Combined Work that + the Library is used in it and that the Library and its use are + covered by this License. + + b) Accompany the Combined Work with a copy of the GNU GPL and this license + document. + + c) For a Combined Work that displays copyright notices during + execution, include the copyright notice for the Library among + these notices, as well as a reference directing the user to the + copies of the GNU GPL and this license document. + + d) Do one of the following: + + 0) Convey the Minimal Corresponding Source under the terms of this + License, and the Corresponding Application Code in a form + suitable for, and under terms that permit, the user to + recombine or relink the Application with a modified version of + the Linked Version to produce a modified Combined Work, in the + manner specified by section 6 of the GNU GPL for conveying + Corresponding Source. + + 1) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (a) uses at run time + a copy of the Library already present on the user's computer + system, and (b) will operate properly with a modified version + of the Library that is interface-compatible with the Linked + Version. + + e) Provide Installation Information, but only if you would otherwise + be required to provide such information under section 6 of the + GNU GPL, and only to the extent that such information is + necessary to install and execute a modified version of the + Combined Work produced by recombining or relinking the + Application with a modified version of the Linked Version. 
(If + you use option 4d0, the Installation Information must accompany + the Minimal Corresponding Source and Corresponding Application + Code. If you use option 4d1, you must provide the Installation + Information in the manner specified by section 6 of the GNU GPL + for conveying Corresponding Source.) + + 5. Combined Libraries. + + You may place library facilities that are a work based on the +Library side by side in a single library together with other library +facilities that are not Applications and are not covered by this +License, and convey such a combined library under terms of your +choice, if you do both of the following: + + a) Accompany the combined library with a copy of the same work based + on the Library, uncombined with any other library facilities, + conveyed under the terms of this License. + + b) Give prominent notice with the combined library that part of it + is a work based on the Library, and explaining where to find the + accompanying uncombined form of the same work. + + 6. Revised Versions of the GNU Lesser General Public License. + + The Free Software Foundation may publish revised and/or new versions +of the GNU Lesser General Public License from time to time. Such new +versions will be similar in spirit to the present version, but may +differ in detail to address new problems or concerns. + + Each version is given a distinguishing version number. If the +Library as you received it specifies that a certain numbered version +of the GNU Lesser General Public License "or any later version" +applies to it, you have the option of following the terms and +conditions either of that published version or of any later version +published by the Free Software Foundation. If the Library as you +received it does not specify a version number of the GNU Lesser +General Public License, you may choose any version of the GNU Lesser +General Public License ever published by the Free Software Foundation. 
+ + If the Library as you received it specifies that a proxy can decide +whether future versions of the GNU Lesser General Public License shall +apply, that proxy's public statement of acceptance of any version is +permanent authorization for you to choose that version for the +Library. diff --git a/lib/python3.11/site-packages/psycopg-3.1.9.dist-info/METADATA b/lib/python3.11/site-packages/psycopg-3.1.9.dist-info/METADATA new file mode 100644 index 0000000..d4fecdb --- /dev/null +++ b/lib/python3.11/site-packages/psycopg-3.1.9.dist-info/METADATA @@ -0,0 +1,93 @@ +Metadata-Version: 2.1 +Name: psycopg +Version: 3.1.9 +Summary: PostgreSQL database adapter for Python +Home-page: https://psycopg.org/psycopg3/ +Author: Daniele Varrazzo +Author-email: daniele.varrazzo@gmail.com +License: GNU Lesser General Public License v3 (LGPLv3) +Project-URL: Homepage, https://psycopg.org/ +Project-URL: Documentation, https://psycopg.org/psycopg3/docs/ +Project-URL: Changes, https://psycopg.org/psycopg3/docs/news.html +Project-URL: Code, https://github.com/psycopg/psycopg +Project-URL: Issue Tracker, https://github.com/psycopg/psycopg/issues +Project-URL: Download, https://pypi.org/project/psycopg/ +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3) +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: POSIX +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Topic :: Database +Classifier: Topic :: Database :: Front-Ends +Classifier: Topic :: Software Development +Classifier: Topic :: Software Development 
:: Libraries :: Python Modules +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE.txt +Requires-Dist: typing-extensions (>=4.1) +Requires-Dist: backports.zoneinfo (>=0.2.0) ; python_version < "3.9" +Requires-Dist: tzdata ; sys_platform == "win32" +Provides-Extra: binary +Requires-Dist: psycopg-binary (==3.1.9) ; extra == 'binary' +Provides-Extra: c +Requires-Dist: psycopg-c (==3.1.9) ; extra == 'c' +Provides-Extra: dev +Requires-Dist: black (>=23.1.0) ; extra == 'dev' +Requires-Dist: dnspython (>=2.1) ; extra == 'dev' +Requires-Dist: flake8 (>=4.0) ; extra == 'dev' +Requires-Dist: mypy (>=1.2) ; extra == 'dev' +Requires-Dist: types-setuptools (>=57.4) ; extra == 'dev' +Requires-Dist: wheel (>=0.37) ; extra == 'dev' +Provides-Extra: docs +Requires-Dist: Sphinx (>=5.0) ; extra == 'docs' +Requires-Dist: furo (==2022.6.21) ; extra == 'docs' +Requires-Dist: sphinx-autobuild (>=2021.3.14) ; extra == 'docs' +Requires-Dist: sphinx-autodoc-typehints (>=1.12) ; extra == 'docs' +Provides-Extra: pool +Requires-Dist: psycopg-pool ; extra == 'pool' +Provides-Extra: test +Requires-Dist: anyio (>=3.6.2) ; extra == 'test' +Requires-Dist: mypy (>=1.2) ; extra == 'test' +Requires-Dist: pproxy (>=2.7) ; extra == 'test' +Requires-Dist: pytest (>=6.2.5) ; extra == 'test' +Requires-Dist: pytest-cov (>=3.0) ; extra == 'test' +Requires-Dist: pytest-randomly (>=3.5) ; extra == 'test' + +Psycopg 3: PostgreSQL database adapter for Python +================================================= + +Psycopg 3 is a modern implementation of a PostgreSQL adapter for Python. + +This distribution contains the pure Python package ``psycopg``. + + +Installation +------------ + +In short, run the following:: + + pip install --upgrade pip # to upgrade pip + pip install "psycopg[binary,pool]" # to install package and dependencies + +If something goes wrong, and for more information about installation, please +check out the `Installation documentation`__. + +.. 
__: https://www.psycopg.org/psycopg3/docs/basic/install.html# + + +Hacking +------- + +For development information check out `the project readme`__. + +.. __: https://github.com/psycopg/psycopg#readme + + +Copyright (C) 2020 The Psycopg Team diff --git a/lib/python3.11/site-packages/psycopg-3.1.9.dist-info/RECORD b/lib/python3.11/site-packages/psycopg-3.1.9.dist-info/RECORD new file mode 100644 index 0000000..2434333 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg-3.1.9.dist-info/RECORD @@ -0,0 +1,132 @@ +psycopg-3.1.9.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +psycopg-3.1.9.dist-info/LICENSE.txt,sha256=46mU2C5kSwOnkqkw9XQAJlhBL2JAf1_uCD8lVcXyMRg,7652 +psycopg-3.1.9.dist-info/METADATA,sha256=G152cQnnElSmFSJEGnRFFsTI8MYV9OO0NFFXg76_aXw,3576 +psycopg-3.1.9.dist-info/RECORD,, +psycopg-3.1.9.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +psycopg-3.1.9.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92 +psycopg-3.1.9.dist-info/top_level.txt,sha256=npthKx2_OEoahttYqdsIPlRUgjS_bQBmfpDCVxQcPGo,8 +psycopg/__init__.py,sha256=IALSmAJ3hedCpA_mesbu-Df0jtdSj87uqkerLNmxpvA,3055 +psycopg/__pycache__/__init__.cpython-311.pyc,, +psycopg/__pycache__/_adapters_map.cpython-311.pyc,, +psycopg/__pycache__/_cmodule.cpython-311.pyc,, +psycopg/__pycache__/_column.cpython-311.pyc,, +psycopg/__pycache__/_compat.cpython-311.pyc,, +psycopg/__pycache__/_dns.cpython-311.pyc,, +psycopg/__pycache__/_encodings.cpython-311.pyc,, +psycopg/__pycache__/_enums.cpython-311.pyc,, +psycopg/__pycache__/_pipeline.cpython-311.pyc,, +psycopg/__pycache__/_preparing.cpython-311.pyc,, +psycopg/__pycache__/_queries.cpython-311.pyc,, +psycopg/__pycache__/_struct.cpython-311.pyc,, +psycopg/__pycache__/_tpc.cpython-311.pyc,, +psycopg/__pycache__/_transform.cpython-311.pyc,, +psycopg/__pycache__/_typeinfo.cpython-311.pyc,, +psycopg/__pycache__/_tz.cpython-311.pyc,, +psycopg/__pycache__/_wrappers.cpython-311.pyc,, 
+psycopg/__pycache__/abc.cpython-311.pyc,, +psycopg/__pycache__/adapt.cpython-311.pyc,, +psycopg/__pycache__/client_cursor.cpython-311.pyc,, +psycopg/__pycache__/connection.cpython-311.pyc,, +psycopg/__pycache__/connection_async.cpython-311.pyc,, +psycopg/__pycache__/conninfo.cpython-311.pyc,, +psycopg/__pycache__/copy.cpython-311.pyc,, +psycopg/__pycache__/cursor.cpython-311.pyc,, +psycopg/__pycache__/cursor_async.cpython-311.pyc,, +psycopg/__pycache__/dbapi20.cpython-311.pyc,, +psycopg/__pycache__/errors.cpython-311.pyc,, +psycopg/__pycache__/generators.cpython-311.pyc,, +psycopg/__pycache__/postgres.cpython-311.pyc,, +psycopg/__pycache__/rows.cpython-311.pyc,, +psycopg/__pycache__/server_cursor.cpython-311.pyc,, +psycopg/__pycache__/sql.cpython-311.pyc,, +psycopg/__pycache__/transaction.cpython-311.pyc,, +psycopg/__pycache__/version.cpython-311.pyc,, +psycopg/__pycache__/waiting.cpython-311.pyc,, +psycopg/_adapters_map.py,sha256=ZDS-g64MnEqkBzbKAhMdpGBNBfgSCaNAFtHpO5WpQew,10709 +psycopg/_cmodule.py,sha256=q6CxV-8shy72bscUFaeltbREcA8lUJ6IEFjuf4X8HfU,786 +psycopg/_column.py,sha256=TF_XEIpltbGMyXtLV2I1fL9Kgzi8KmQVOxWDedmb_nY,3803 +psycopg/_compat.py,sha256=7bA8L_rzgUZQ01_lJAs7kGj3ElgWFQtnbc4lKwUq2Lg,1763 +psycopg/_dns.py,sha256=tMxJCt1r-E-sSZjR1Yur66CaaCdUnaI4DHwSLES5Pzs,7316 +psycopg/_encodings.py,sha256=GTzriHVX0j1eDcR0gpeKp5zJaZkVe0I3a1vTiyVewXY,4518 +psycopg/_enums.py,sha256=GXJG0OqgQRaUOisysI3iWEvtGko8xrFJwAmnbehci-0,1678 +psycopg/_pipeline.py,sha256=1bFjypUcfewtmX7GVMhomNRz6rvsI2-O93HX3skMhkw,10223 +psycopg/_preparing.py,sha256=6jxCx5urk9JMsjp2PBYuq9pkSUL0aq3Z0CkhR08Mku8,6323 +psycopg/_queries.py,sha256=1Rt1E7ouEqA09kMjrVejunk85iZQMS8qCljCu4DAAhs,11627 +psycopg/_struct.py,sha256=jgJpHymLt899gaIPDlTUBkx3T6yTKBCr92DXR23loNY,2014 +psycopg/_tpc.py,sha256=3cWUSpUg0RgI_PEXCKmsr-ubHIKcqfsWX6MxOtKhUns,3583 +psycopg/_transform.py,sha256=Iz54c91djEfXWPHyAei1Z9-6ngGRGa6JCsGdtS0L318,11452 +psycopg/_typeinfo.py,sha256=PN_3o68sFGsk605wbQcy6Q1B1KzHT58ZFzqZmAAYTSw,14726 
+psycopg/_tz.py,sha256=2O-tdDIVg1ckvbZ2W5gWX7O2O-RkotGEtCggn-NIT8U,1200 +psycopg/_wrappers.py,sha256=wKZt_ar8QuRgEKC6tuwamYXCwB1P7IZQsmgYdVOlAhU,3126 +psycopg/abc.py,sha256=kQ62n9TnSfZZjYrZN71CMNk9oRBIKKcWXIl-tLIInfk,7725 +psycopg/adapt.py,sha256=hnWdPy1ylx8gLMTBq99vevqmOhOyQJcDPnKuQ3SV3Vk,5307 +psycopg/client_cursor.py,sha256=F0yIS7YMDegIEtSIvzTxJ0naR6k00PWnevsxRcPoOcU,2815 +psycopg/connection.py,sha256=A5cYcH5pG5jJLSH7ddWXJXe5Ts801zDXc3BMrUNOuvs,33949 +psycopg/connection_async.py,sha256=bNL8Ic1IYHp4f2i20waPliiGWn7oySKCsvcDt6ykLGc,14051 +psycopg/conninfo.py,sha256=wbul7hMsnk38e1WBOEZ2LyYbcBXLQOyT9fAqABxiVeU,11906 +psycopg/copy.py,sha256=59LlWCNyosQgEr6F7xyWcRQbqQk6Nwhu313m58Ew0c8,29148 +psycopg/crdb/__init__.py,sha256=kyfsUIGuFvQTpLuXJcQ6gMpOB5WXeWtBa2wQjaqE3-g,396 +psycopg/crdb/__pycache__/__init__.cpython-311.pyc,, +psycopg/crdb/__pycache__/_types.cpython-311.pyc,, +psycopg/crdb/__pycache__/connection.cpython-311.pyc,, +psycopg/crdb/_types.py,sha256=ELUBFTS3UD_P2cIR68IVRZQ3Gv2CzJQvdbqt2LMB5Z4,5891 +psycopg/crdb/connection.py,sha256=qxXG1DqIro6ezbxyifOE55ZwKdWFMrtJyK05LNaIGSo,5394 +psycopg/cursor.py,sha256=16pdVsSeeBhtNftJUXho-nfGX68W6EiEyVv-mIuXnsw,30765 +psycopg/cursor_async.py,sha256=lQ7KsJHgCb_NnCOWnBq5tCv7drR4UP7_Q-FmMi6Z4KY,7924 +psycopg/dbapi20.py,sha256=-kzMT8Rn9-U2IMctFAEjVNlWFDvAHH_Q13B3k6T6TEo,3175 +psycopg/errors.py,sha256=vyKbMqtSqsE_c3UD2efvIjpn5zgOcwszpkf-CTv2J54,40714 +psycopg/generators.py,sha256=FVBEYg_9E_L-Gs2yoCk3MWJbHUvKLABQ4hEtL4fyRiM,9209 +psycopg/postgres.py,sha256=TN02kIF3l4wl5EFEHN-GoMSkcT_uBPaqlpFIY5xP634,4986 +psycopg/pq/__init__.py,sha256=1zGPgm9Zf31MfBaX7jz-t_F8YA1lRJNQx1rDJ1cnGAI,3880 +psycopg/pq/__pycache__/__init__.cpython-311.pyc,, +psycopg/pq/__pycache__/_debug.cpython-311.pyc,, +psycopg/pq/__pycache__/_enums.cpython-311.pyc,, +psycopg/pq/__pycache__/_pq_ctypes.cpython-311.pyc,, +psycopg/pq/__pycache__/abc.cpython-311.pyc,, +psycopg/pq/__pycache__/misc.cpython-311.pyc,, +psycopg/pq/__pycache__/pq_ctypes.cpython-311.pyc,, 
+psycopg/pq/_debug.py,sha256=jfIsSITc43a1AlNdaOd0r7UhbV6wc2ed1zBJ8n3Y4BA,3051 +psycopg/pq/_enums.py,sha256=a5crxkDrP6NMBcdYuGnoL-QExEg-qqe0QtNc9SFXt1U,5660 +psycopg/pq/_pq_ctypes.py,sha256=tr9BeMMEOnjCC8t4uZIxlyZJc-zKgztQ63hvGs0FcT4,20061 +psycopg/pq/abc.py,sha256=PGhs5_MOfYhyo23r8PLZxozK9bVH2ACe8c8TnewYPmw,7767 +psycopg/pq/misc.py,sha256=1VCWIvzebWNG1Ej7KJI5NpWsPFJReaJRGoM1VODpMJ0,3957 +psycopg/pq/pq_ctypes.py,sha256=oU085qsovMe453T-QM848j_ODetxeLagNpcTDpoL8RI,35791 +psycopg/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +psycopg/rows.py,sha256=2jh1xNMl5OQxVGxudP2H9iO9epDU0tNxe9qqufCRrq8,7449 +psycopg/server_cursor.py,sha256=fkOaZ_sTKmqVm1R190rKu1IcC3DcE7VI0Er0rsx4l68,15006 +psycopg/sql.py,sha256=1VlZ5esUXJYTU-h796FfnHJCnsiYPDcvrzjJfC_qlxo,15925 +psycopg/transaction.py,sha256=VYFmPcL9TV4OlNc3KpxC8f0CakG3wIVGYXnPc7G5O70,9426 +psycopg/types/__init__.py,sha256=JPAkV4EfkXwdsr76_U1DsQV19c8aQNSB6kgud4bhXeM,181 +psycopg/types/__pycache__/__init__.cpython-311.pyc,, +psycopg/types/__pycache__/array.cpython-311.pyc,, +psycopg/types/__pycache__/bool.cpython-311.pyc,, +psycopg/types/__pycache__/composite.cpython-311.pyc,, +psycopg/types/__pycache__/datetime.cpython-311.pyc,, +psycopg/types/__pycache__/enum.cpython-311.pyc,, +psycopg/types/__pycache__/hstore.cpython-311.pyc,, +psycopg/types/__pycache__/json.cpython-311.pyc,, +psycopg/types/__pycache__/multirange.cpython-311.pyc,, +psycopg/types/__pycache__/net.cpython-311.pyc,, +psycopg/types/__pycache__/none.cpython-311.pyc,, +psycopg/types/__pycache__/numeric.cpython-311.pyc,, +psycopg/types/__pycache__/range.cpython-311.pyc,, +psycopg/types/__pycache__/shapely.cpython-311.pyc,, +psycopg/types/__pycache__/string.cpython-311.pyc,, +psycopg/types/__pycache__/uuid.cpython-311.pyc,, +psycopg/types/array.py,sha256=e4gnMAWpjGaZ3-8N65Wm3JziPfwloKrI2dJYiqBc7lA,14643 +psycopg/types/bool.py,sha256=yLbDw8HCl26j7MFB3xlkaTdSK0pBrk2NKe5NuY4lc_w,1137 
+psycopg/types/composite.py,sha256=nXzwxQUuTTXnlLRh7KZGwZb10fRo8f9q9_w8u14bKmc,9444 +psycopg/types/datetime.py,sha256=yoViTWpRkkdBImVebjwSjFufHLu9BBV_YIAuHRINzZ8,24788 +psycopg/types/enum.py,sha256=bAG0v03mUo2ts7Zb67mTY3eWta10nPOKNdczsQpytV4,5149 +psycopg/types/hstore.py,sha256=ZseqKnuB5w59JSw4bsshLSlwSVFJGiyGj-SE0yRc2gg,3718 +psycopg/types/json.py,sha256=Ru73V1w8q84Ojv769K3sI8e70YxfCGUbnkz5GPEiANM,6958 +psycopg/types/multirange.py,sha256=lMauuSHckhCSolt6HPs39CMHv515Rc6IbHK3eTmCxJs,16390 +psycopg/types/net.py,sha256=JIU0Odz8yJ_lbGanYoQ-kSrSF5k0lHYNn12wwn4uVho,6941 +psycopg/types/none.py,sha256=4b96Aw0WSW6Jpo_HMIvKUkivAxkxvHJzvzy-Q6Uf-60,613 +psycopg/types/numeric.py,sha256=RpeY0CfipDcR4t15sc3ZbytK90lFCLCJEOIhicMkpuk,13703 +psycopg/types/range.py,sha256=JshuZE2QKbDg6VBUIhmKAy3fAYDRWW3kcXctHn643Z4,20483 +psycopg/types/shapely.py,sha256=LkBsLkq_uJuzY6JzKR4vjJCGbaqRepAOjL_rSCiCvpM,2096 +psycopg/types/string.py,sha256=hWqmXSkX1da7Bg7JSuu-5f4ea6Ho3Aklz8lO89rfGzE,7662 +psycopg/types/uuid.py,sha256=5xqkVcZ22jX7a2oU9PXz_71y6VKpO5EYj8AITiSQNBE,1616 +psycopg/version.py,sha256=Li_I4PXX5XHaj8jiJpD4-vrNDBlq0PyZ3xJ2Ir_5USc,375 +psycopg/waiting.py,sha256=ki6TLORe7n9cNwpZWLjDA2kmoh9cOpckb-M7gRLAvKc,10011 diff --git a/lib/python3.11/site-packages/psycopg-3.1.9.dist-info/REQUESTED b/lib/python3.11/site-packages/psycopg-3.1.9.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/lib/python3.11/site-packages/psycopg-3.1.9.dist-info/WHEEL b/lib/python3.11/site-packages/psycopg-3.1.9.dist-info/WHEEL new file mode 100644 index 0000000..1f37c02 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg-3.1.9.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.40.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/lib/python3.11/site-packages/psycopg-3.1.9.dist-info/top_level.txt b/lib/python3.11/site-packages/psycopg-3.1.9.dist-info/top_level.txt new file mode 100644 index 0000000..296ca87 --- /dev/null +++ 
b/lib/python3.11/site-packages/psycopg-3.1.9.dist-info/top_level.txt @@ -0,0 +1 @@ +psycopg diff --git a/lib/python3.11/site-packages/psycopg/__init__.py b/lib/python3.11/site-packages/psycopg/__init__.py new file mode 100644 index 0000000..baadf30 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/__init__.py @@ -0,0 +1,110 @@ +""" +psycopg -- PostgreSQL database adapter for Python +""" + +# Copyright (C) 2020 The Psycopg Team + +import logging + +from . import pq # noqa: F401 import early to stabilize side effects +from . import types +from . import postgres +from ._tpc import Xid +from .copy import Copy, AsyncCopy +from ._enums import IsolationLevel +from .cursor import Cursor +from .errors import Warning, Error, InterfaceError, DatabaseError +from .errors import DataError, OperationalError, IntegrityError +from .errors import InternalError, ProgrammingError, NotSupportedError +from ._column import Column +from .conninfo import ConnectionInfo +from ._pipeline import Pipeline, AsyncPipeline +from .connection import BaseConnection, Connection, Notify +from .transaction import Rollback, Transaction, AsyncTransaction +from .cursor_async import AsyncCursor +from .server_cursor import AsyncServerCursor, ServerCursor +from .client_cursor import AsyncClientCursor, ClientCursor +from .connection_async import AsyncConnection + +from . 
import dbapi20 +from .dbapi20 import BINARY, DATETIME, NUMBER, ROWID, STRING +from .dbapi20 import Binary, Date, DateFromTicks, Time, TimeFromTicks +from .dbapi20 import Timestamp, TimestampFromTicks + +from .version import __version__ as __version__ # noqa: F401 + +# Set the logger to a quiet default, can be enabled if needed +logger = logging.getLogger("psycopg") +if logger.level == logging.NOTSET: + logger.setLevel(logging.WARNING) + +# DBAPI compliance +connect = Connection.connect +apilevel = "2.0" +threadsafety = 2 +paramstyle = "pyformat" + +# register default adapters for PostgreSQL +adapters = postgres.adapters # exposed by the package +postgres.register_default_adapters(adapters) + +# After the default ones, because these can deal with the bytea oid better +dbapi20.register_dbapi20_adapters(adapters) + +# Must come after all the types have been registered +types.array.register_all_arrays(adapters) + +# Note: defining the exported methods helps both Sphynx in documenting that +# this is the canonical place to obtain them and should be used by MyPy too, +# so that function signatures are consistent with the documentation. 
+__all__ = [ + "AsyncClientCursor", + "AsyncConnection", + "AsyncCopy", + "AsyncCursor", + "AsyncPipeline", + "AsyncServerCursor", + "AsyncTransaction", + "BaseConnection", + "ClientCursor", + "Column", + "Connection", + "ConnectionInfo", + "Copy", + "Cursor", + "IsolationLevel", + "Notify", + "Pipeline", + "Rollback", + "ServerCursor", + "Transaction", + "Xid", + # DBAPI exports + "connect", + "apilevel", + "threadsafety", + "paramstyle", + "Warning", + "Error", + "InterfaceError", + "DatabaseError", + "DataError", + "OperationalError", + "IntegrityError", + "InternalError", + "ProgrammingError", + "NotSupportedError", + # DBAPI type constructors and singletons + "Binary", + "Date", + "DateFromTicks", + "Time", + "TimeFromTicks", + "Timestamp", + "TimestampFromTicks", + "BINARY", + "DATETIME", + "NUMBER", + "ROWID", + "STRING", +] diff --git a/lib/python3.11/site-packages/psycopg/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..48077b5 Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/__pycache__/_adapters_map.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/_adapters_map.cpython-311.pyc new file mode 100644 index 0000000..7b807cb Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/_adapters_map.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/__pycache__/_cmodule.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/_cmodule.cpython-311.pyc new file mode 100644 index 0000000..c35d9be Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/_cmodule.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/__pycache__/_column.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/_column.cpython-311.pyc new file mode 100644 index 
0000000..b12448a Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/_column.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/__pycache__/_compat.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/_compat.cpython-311.pyc new file mode 100644 index 0000000..b0bc597 Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/_compat.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/__pycache__/_dns.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/_dns.cpython-311.pyc new file mode 100644 index 0000000..edd48a5 Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/_dns.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/__pycache__/_encodings.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/_encodings.cpython-311.pyc new file mode 100644 index 0000000..5dfbdc7 Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/_encodings.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/__pycache__/_enums.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/_enums.cpython-311.pyc new file mode 100644 index 0000000..3da5c6d Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/_enums.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/__pycache__/_pipeline.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/_pipeline.cpython-311.pyc new file mode 100644 index 0000000..a8a563a Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/_pipeline.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/__pycache__/_preparing.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/_preparing.cpython-311.pyc new file mode 100644 index 0000000..e02768e Binary files /dev/null and 
b/lib/python3.11/site-packages/psycopg/__pycache__/_preparing.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/__pycache__/_queries.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/_queries.cpython-311.pyc new file mode 100644 index 0000000..ed71167 Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/_queries.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/__pycache__/_struct.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/_struct.cpython-311.pyc new file mode 100644 index 0000000..8e7b98c Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/_struct.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/__pycache__/_tpc.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/_tpc.cpython-311.pyc new file mode 100644 index 0000000..abd5724 Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/_tpc.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/__pycache__/_transform.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/_transform.cpython-311.pyc new file mode 100644 index 0000000..238600c Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/_transform.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/__pycache__/_typeinfo.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/_typeinfo.cpython-311.pyc new file mode 100644 index 0000000..ef95ff3 Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/_typeinfo.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/__pycache__/_tz.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/_tz.cpython-311.pyc new file mode 100644 index 0000000..dae9163 Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/_tz.cpython-311.pyc differ diff --git 
a/lib/python3.11/site-packages/psycopg/__pycache__/_wrappers.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/_wrappers.cpython-311.pyc new file mode 100644 index 0000000..a04b645 Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/_wrappers.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/__pycache__/abc.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/abc.cpython-311.pyc new file mode 100644 index 0000000..e37656f Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/abc.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/__pycache__/adapt.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/adapt.cpython-311.pyc new file mode 100644 index 0000000..59d0524 Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/adapt.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/__pycache__/client_cursor.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/client_cursor.cpython-311.pyc new file mode 100644 index 0000000..8cef469 Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/client_cursor.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/__pycache__/connection.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/connection.cpython-311.pyc new file mode 100644 index 0000000..29ac0ad Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/connection.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/__pycache__/connection_async.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/connection_async.cpython-311.pyc new file mode 100644 index 0000000..c073379 Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/connection_async.cpython-311.pyc differ diff --git 
a/lib/python3.11/site-packages/psycopg/__pycache__/conninfo.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/conninfo.cpython-311.pyc new file mode 100644 index 0000000..58fb2ba Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/conninfo.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/__pycache__/copy.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/copy.cpython-311.pyc new file mode 100644 index 0000000..e00c0d1 Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/copy.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/__pycache__/cursor.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/cursor.cpython-311.pyc new file mode 100644 index 0000000..5bf2161 Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/cursor.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/__pycache__/cursor_async.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/cursor_async.cpython-311.pyc new file mode 100644 index 0000000..ab82086 Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/cursor_async.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/__pycache__/dbapi20.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/dbapi20.cpython-311.pyc new file mode 100644 index 0000000..974e966 Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/dbapi20.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/__pycache__/errors.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/errors.cpython-311.pyc new file mode 100644 index 0000000..3f16adf Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/errors.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/__pycache__/generators.cpython-311.pyc 
b/lib/python3.11/site-packages/psycopg/__pycache__/generators.cpython-311.pyc new file mode 100644 index 0000000..d97fd2d Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/generators.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/__pycache__/postgres.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/postgres.cpython-311.pyc new file mode 100644 index 0000000..66175fb Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/postgres.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/__pycache__/rows.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/rows.cpython-311.pyc new file mode 100644 index 0000000..3c37fd0 Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/rows.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/__pycache__/server_cursor.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/server_cursor.cpython-311.pyc new file mode 100644 index 0000000..ab10d07 Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/server_cursor.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/__pycache__/sql.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/sql.cpython-311.pyc new file mode 100644 index 0000000..6804087 Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/sql.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/__pycache__/transaction.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/transaction.cpython-311.pyc new file mode 100644 index 0000000..fc37514 Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/transaction.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/__pycache__/version.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/version.cpython-311.pyc new file mode 100644 
index 0000000..25393f4 Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/version.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/__pycache__/waiting.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/__pycache__/waiting.cpython-311.pyc new file mode 100644 index 0000000..ac862f6 Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/__pycache__/waiting.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/_adapters_map.py b/lib/python3.11/site-packages/psycopg/_adapters_map.py new file mode 100644 index 0000000..70bf4cc --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/_adapters_map.py @@ -0,0 +1,295 @@ +""" +Mapping from types/oids to Dumpers/Loaders +""" + +# Copyright (C) 2020 The Psycopg Team + +from typing import Any, Dict, List, Optional, Type, TypeVar, Union +from typing import cast, TYPE_CHECKING + +from . import pq +from . import errors as e +from .abc import Dumper, Loader +from ._enums import PyFormat as PyFormat +from ._cmodule import _psycopg +from ._typeinfo import TypesRegistry + +if TYPE_CHECKING: + from .connection import BaseConnection + +RV = TypeVar("RV") + + +class AdaptersMap: + r""" + Establish how types should be converted between Python and PostgreSQL in + an `~psycopg.abc.AdaptContext`. + + `!AdaptersMap` maps Python types to `~psycopg.adapt.Dumper` classes to + define how Python types are converted to PostgreSQL, and maps OIDs to + `~psycopg.adapt.Loader` classes to establish how query results are + converted to Python. + + Every `!AdaptContext` object has an underlying `!AdaptersMap` defining how + types are converted in that context, exposed as the + `~psycopg.abc.AdaptContext.adapters` attribute: changing such map allows + to customise adaptation in a context without changing separated contexts. 
+ + When a context is created from another context (for instance when a + `~psycopg.Cursor` is created from a `~psycopg.Connection`), the parent's + `!adapters` are used as template for the child's `!adapters`, so that every + cursor created from the same connection use the connection's types + configuration, but separate connections have independent mappings. + + Once created, `!AdaptersMap` are independent. This means that objects + already created are not affected if a wider scope (e.g. the global one) is + changed. + + The connections adapters are initialised using a global `!AdptersMap` + template, exposed as `psycopg.adapters`: changing such mapping allows to + customise the type mapping for every connections created afterwards. + + The object can start empty or copy from another object of the same class. + Copies are copy-on-write: if the maps are updated make a copy. This way + extending e.g. global map by a connection or a connection map from a cursor + is cheap: a copy is only made on customisation. + """ + + __module__ = "psycopg.adapt" + + types: TypesRegistry + + _dumpers: Dict[PyFormat, Dict[Union[type, str], Type[Dumper]]] + _dumpers_by_oid: List[Dict[int, Type[Dumper]]] + _loaders: List[Dict[int, Type[Loader]]] + + # Record if a dumper or loader has an optimised version. 
+ _optimised: Dict[type, type] = {} + + def __init__( + self, + template: Optional["AdaptersMap"] = None, + types: Optional[TypesRegistry] = None, + ): + if template: + self._dumpers = template._dumpers.copy() + self._own_dumpers = _dumpers_shared.copy() + template._own_dumpers = _dumpers_shared.copy() + + self._dumpers_by_oid = template._dumpers_by_oid[:] + self._own_dumpers_by_oid = [False, False] + template._own_dumpers_by_oid = [False, False] + + self._loaders = template._loaders[:] + self._own_loaders = [False, False] + template._own_loaders = [False, False] + + self.types = TypesRegistry(template.types) + + else: + self._dumpers = {fmt: {} for fmt in PyFormat} + self._own_dumpers = _dumpers_owned.copy() + + self._dumpers_by_oid = [{}, {}] + self._own_dumpers_by_oid = [True, True] + + self._loaders = [{}, {}] + self._own_loaders = [True, True] + + self.types = types or TypesRegistry() + + # implement the AdaptContext protocol too + @property + def adapters(self) -> "AdaptersMap": + return self + + @property + def connection(self) -> Optional["BaseConnection[Any]"]: + return None + + def register_dumper( + self, cls: Union[type, str, None], dumper: Type[Dumper] + ) -> None: + """ + Configure the context to use `!dumper` to convert objects of type `!cls`. + + If two dumpers with different `~Dumper.format` are registered for the + same type, the last one registered will be chosen when the query + doesn't specify a format (i.e. when the value is used with a ``%s`` + "`~PyFormat.AUTO`" placeholder). + + :param cls: The type to manage. + :param dumper: The dumper to register for `!cls`. + + If `!cls` is specified as string it will be lazy-loaded, so that it + will be possible to register it without importing it before. In this + case it should be the fully qualified name of the object (e.g. + ``"uuid.UUID"``). 
+ + If `!cls` is None, only use the dumper when looking up using + `get_dumper_by_oid()`, which happens when we know the Postgres type to + adapt to, but not the Python type that will be adapted (e.g. in COPY + after using `~psycopg.Copy.set_types()`). + + """ + if not (cls is None or isinstance(cls, (str, type))): + raise TypeError( + f"dumpers should be registered on classes, got {cls} instead" + ) + + if _psycopg: + dumper = self._get_optimised(dumper) + + # Register the dumper both as its format and as auto + # so that the last dumper registered is used in auto (%s) format + if cls: + for fmt in (PyFormat.from_pq(dumper.format), PyFormat.AUTO): + if not self._own_dumpers[fmt]: + self._dumpers[fmt] = self._dumpers[fmt].copy() + self._own_dumpers[fmt] = True + + self._dumpers[fmt][cls] = dumper + + # Register the dumper by oid, if the oid of the dumper is fixed + if dumper.oid: + if not self._own_dumpers_by_oid[dumper.format]: + self._dumpers_by_oid[dumper.format] = self._dumpers_by_oid[ + dumper.format + ].copy() + self._own_dumpers_by_oid[dumper.format] = True + + self._dumpers_by_oid[dumper.format][dumper.oid] = dumper + + def register_loader(self, oid: Union[int, str], loader: Type["Loader"]) -> None: + """ + Configure the context to use `!loader` to convert data of oid `!oid`. + + :param oid: The PostgreSQL OID or type name to manage. + :param loader: The loar to register for `!oid`. + + If `oid` is specified as string, it refers to a type name, which is + looked up in the `types` registry. 
` + + """ + if isinstance(oid, str): + oid = self.types[oid].oid + if not isinstance(oid, int): + raise TypeError(f"loaders should be registered on oid, got {oid} instead") + + if _psycopg: + loader = self._get_optimised(loader) + + fmt = loader.format + if not self._own_loaders[fmt]: + self._loaders[fmt] = self._loaders[fmt].copy() + self._own_loaders[fmt] = True + + self._loaders[fmt][oid] = loader + + def get_dumper(self, cls: type, format: PyFormat) -> Type["Dumper"]: + """ + Return the dumper class for the given type and format. + + Raise `~psycopg.ProgrammingError` if a class is not available. + + :param cls: The class to adapt. + :param format: The format to dump to. If `~psycopg.adapt.PyFormat.AUTO`, + use the last one of the dumpers registered on `!cls`. + """ + try: + # Fast path: the class has a known dumper. + return self._dumpers[format][cls] + except KeyError: + if format not in self._dumpers: + raise ValueError(f"bad dumper format: {format}") + + # If the KeyError was caused by cls missing from dmap, let's + # look for different cases. + dmap = self._dumpers[format] + + # Look for the right class, including looking at superclasses + for scls in cls.__mro__: + if scls in dmap: + return dmap[scls] + + # If the adapter is not found, look for its name as a string + fqn = scls.__module__ + "." + scls.__qualname__ + if fqn in dmap: + # Replace the class name with the class itself + d = dmap[scls] = dmap.pop(fqn) + return d + + raise e.ProgrammingError( + f"cannot adapt type {cls.__name__!r} using placeholder '%{format}'" + f" (format: {PyFormat(format).name})" + ) + + def get_dumper_by_oid(self, oid: int, format: pq.Format) -> Type["Dumper"]: + """ + Return the dumper class for the given oid and format. + + Raise `~psycopg.ProgrammingError` if a class is not available. + + :param oid: The oid of the type to dump to. + :param format: The format to dump to. 
+ """ + try: + dmap = self._dumpers_by_oid[format] + except KeyError: + raise ValueError(f"bad dumper format: {format}") + + try: + return dmap[oid] + except KeyError: + info = self.types.get(oid) + if info: + msg = ( + f"cannot find a dumper for type {info.name} (oid {oid})" + f" format {pq.Format(format).name}" + ) + else: + msg = ( + f"cannot find a dumper for unknown type with oid {oid}" + f" format {pq.Format(format).name}" + ) + raise e.ProgrammingError(msg) + + def get_loader(self, oid: int, format: pq.Format) -> Optional[Type["Loader"]]: + """ + Return the loader class for the given oid and format. + + Return `!None` if not found. + + :param oid: The oid of the type to load. + :param format: The format to load from. + """ + return self._loaders[format].get(oid) + + @classmethod + def _get_optimised(self, cls: Type[RV]) -> Type[RV]: + """Return the optimised version of a Dumper or Loader class. + + Return the input class itself if there is no optimised version. + """ + try: + return self._optimised[cls] + except KeyError: + pass + + # Check if the class comes from psycopg.types and there is a class + # with the same name in psycopg_c._psycopg. + from psycopg import types + + if cls.__module__.startswith(types.__name__): + new = cast(Type[RV], getattr(_psycopg, cls.__name__, None)) + if new: + self._optimised[cls] = new + return new + + self._optimised[cls] = cls + return cls + + +# Micro-optimization: copying these objects is faster than creating new dicts +_dumpers_owned = dict.fromkeys(PyFormat, True) +_dumpers_shared = dict.fromkeys(PyFormat, False) diff --git a/lib/python3.11/site-packages/psycopg/_cmodule.py b/lib/python3.11/site-packages/psycopg/_cmodule.py new file mode 100644 index 0000000..288ef1b --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/_cmodule.py @@ -0,0 +1,24 @@ +""" +Simplify access to the _psycopg module +""" + +# Copyright (C) 2021 The Psycopg Team + +from typing import Optional + +from . 
import pq + +__version__: Optional[str] = None + +# Note: "c" must the first attempt so that mypy associates the variable the +# right module interface. It will not result Optional, but hey. +if pq.__impl__ == "c": + from psycopg_c import _psycopg as _psycopg + from psycopg_c import __version__ as __version__ # noqa: F401 +elif pq.__impl__ == "binary": + from psycopg_binary import _psycopg as _psycopg # type: ignore + from psycopg_binary import __version__ as __version__ # type: ignore # noqa: F401 +elif pq.__impl__ == "python": + _psycopg = None # type: ignore +else: + raise ImportError(f"can't find _psycopg optimised module in {pq.__impl__!r}") diff --git a/lib/python3.11/site-packages/psycopg/_column.py b/lib/python3.11/site-packages/psycopg/_column.py new file mode 100644 index 0000000..50577e6 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/_column.py @@ -0,0 +1,142 @@ +""" +The Column object in Cursor.description +""" + +# Copyright (C) 2020 The Psycopg Team + +from typing import Any, NamedTuple, Optional, Sequence, TYPE_CHECKING +from operator import attrgetter + +if TYPE_CHECKING: + from .cursor import BaseCursor + + +class ColumnData(NamedTuple): + ftype: int + fmod: int + fsize: int + + +class Column(Sequence[Any]): + __module__ = "psycopg" + + def __init__(self, cursor: "BaseCursor[Any, Any]", index: int): + res = cursor.pgresult + assert res + + fname = res.fname(index) + if fname: + self._name = fname.decode(cursor._encoding) + else: + # COPY_OUT results have columns but no name + self._name = f"column_{index + 1}" + + self._data = ColumnData( + ftype=res.ftype(index), + fmod=res.fmod(index), + fsize=res.fsize(index), + ) + self._type = cursor.adapters.types.get(self._data.ftype) + + _attrs = tuple( + attrgetter(attr) + for attr in """ + name type_code display_size internal_size precision scale null_ok + """.split() + ) + + def __repr__(self) -> str: + return ( + f"" + ) + + def __len__(self) -> int: + return 7 + + def _type_display(self) -> 
str: + parts = [] + parts.append(self._type.name if self._type else str(self.type_code)) + + mod1 = self.precision + if mod1 is None: + mod1 = self.display_size + if mod1: + parts.append(f"({mod1}") + if self.scale: + parts.append(f", {self.scale}") + parts.append(")") + + if self._type and self.type_code == self._type.array_oid: + parts.append("[]") + + return "".join(parts) + + def __getitem__(self, index: Any) -> Any: + if isinstance(index, slice): + return tuple(getter(self) for getter in self._attrs[index]) + else: + return self._attrs[index](self) + + @property + def name(self) -> str: + """The name of the column.""" + return self._name + + @property + def type_code(self) -> int: + """The numeric OID of the column.""" + return self._data.ftype + + @property + def display_size(self) -> Optional[int]: + """The field size, for :sql:`varchar(n)`, None otherwise.""" + if not self._type: + return None + + if self._type.name in ("varchar", "char"): + fmod = self._data.fmod + if fmod >= 0: + return fmod - 4 + + return None + + @property + def internal_size(self) -> Optional[int]: + """The internal field size for fixed-size types, None otherwise.""" + fsize = self._data.fsize + return fsize if fsize >= 0 else None + + @property + def precision(self) -> Optional[int]: + """The number of digits for fixed precision types.""" + if not self._type: + return None + + dttypes = ("time", "timetz", "timestamp", "timestamptz", "interval") + if self._type.name == "numeric": + fmod = self._data.fmod + if fmod >= 0: + return fmod >> 16 + + elif self._type.name in dttypes: + fmod = self._data.fmod + if fmod >= 0: + return fmod & 0xFFFF + + return None + + @property + def scale(self) -> Optional[int]: + """The number of digits after the decimal point if available.""" + if self._type and self._type.name == "numeric": + fmod = self._data.fmod - 4 + if fmod >= 0: + return fmod & 0xFFFF + + return None + + @property + def null_ok(self) -> Optional[bool]: + """Always `!None`""" + return 
None diff --git a/lib/python3.11/site-packages/psycopg/_compat.py b/lib/python3.11/site-packages/psycopg/_compat.py new file mode 100644 index 0000000..7dbae79 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/_compat.py @@ -0,0 +1,72 @@ +""" +compatibility functions for different Python versions +""" + +# Copyright (C) 2021 The Psycopg Team + +import sys +import asyncio +from typing import Any, Awaitable, Generator, Optional, Sequence, Union, TypeVar + +# NOTE: TypeAlias cannot be exported by this module, as pyright special-cases it. +# For this raisin it must be imported directly from typing_extension where used. +# See https://github.com/microsoft/pyright/issues/4197 +from typing_extensions import TypeAlias + +if sys.version_info >= (3, 8): + from typing import Protocol +else: + from typing_extensions import Protocol + +T = TypeVar("T") +FutureT: TypeAlias = Union["asyncio.Future[T]", Generator[Any, None, T], Awaitable[T]] + +if sys.version_info >= (3, 8): + create_task = asyncio.create_task + from math import prod + +else: + + def create_task( + coro: FutureT[T], name: Optional[str] = None + ) -> "asyncio.Future[T]": + return asyncio.create_task(coro) + + from functools import reduce + + def prod(seq: Sequence[int]) -> int: + return reduce(int.__mul__, seq, 1) + + +if sys.version_info >= (3, 9): + from zoneinfo import ZoneInfo + from functools import cache + from collections import Counter, deque as Deque +else: + from typing import Counter, Deque + from functools import lru_cache + from backports.zoneinfo import ZoneInfo + + cache = lru_cache(maxsize=None) + +if sys.version_info >= (3, 10): + from typing import TypeGuard +else: + from typing_extensions import TypeGuard + +if sys.version_info >= (3, 11): + from typing import LiteralString +else: + from typing_extensions import LiteralString + +__all__ = [ + "Counter", + "Deque", + "LiteralString", + "Protocol", + "TypeGuard", + "ZoneInfo", + "cache", + "create_task", + "prod", +] diff --git 
a/lib/python3.11/site-packages/psycopg/_dns.py b/lib/python3.11/site-packages/psycopg/_dns.py new file mode 100644 index 0000000..1e146ba --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/_dns.py @@ -0,0 +1,223 @@ +# type: ignore # dnspython is currently optional and mypy fails if missing +""" +DNS query support +""" + +# Copyright (C) 2021 The Psycopg Team + +import os +import re +import warnings +from random import randint +from typing import Any, DefaultDict, Dict, List, NamedTuple, Optional, Sequence +from typing import TYPE_CHECKING +from collections import defaultdict + +try: + from dns.resolver import Resolver, Cache + from dns.asyncresolver import Resolver as AsyncResolver + from dns.exception import DNSException +except ImportError: + raise ImportError( + "the module psycopg._dns requires the package 'dnspython' installed" + ) + +from . import errors as e +from .conninfo import resolve_hostaddr_async as resolve_hostaddr_async_ + +if TYPE_CHECKING: + from dns.rdtypes.IN.SRV import SRV + +resolver = Resolver() +resolver.cache = Cache() + +async_resolver = AsyncResolver() +async_resolver.cache = Cache() + + +async def resolve_hostaddr_async(params: Dict[str, Any]) -> Dict[str, Any]: + """ + Perform async DNS lookup of the hosts and return a new params dict. + + .. deprecated:: 3.1 + The use of this function is not necessary anymore, because + `psycopg.AsyncConnection.connect()` performs non-blocking name + resolution automatically. 
+ """ + warnings.warn( + "from psycopg 3.1, resolve_hostaddr_async() is not needed anymore", + DeprecationWarning, + ) + return await resolve_hostaddr_async_(params) + + +def resolve_srv(params: Dict[str, Any]) -> Dict[str, Any]: + """Apply SRV DNS lookup as defined in :RFC:`2782`.""" + return Rfc2782Resolver().resolve(params) + + +async def resolve_srv_async(params: Dict[str, Any]) -> Dict[str, Any]: + """Async equivalent of `resolve_srv()`.""" + return await Rfc2782Resolver().resolve_async(params) + + +class HostPort(NamedTuple): + host: str + port: str + totry: bool = False + target: Optional[str] = None + + +class Rfc2782Resolver: + """Implement SRV RR Resolution as per RFC 2782 + + The class is organised to minimise code duplication between the sync and + the async paths. + """ + + re_srv_rr = re.compile(r"^(?P_[^\.]+)\.(?P_[^\.]+)\.(?P.+)") + + def resolve(self, params: Dict[str, Any]) -> Dict[str, Any]: + """Update the parameters host and port after SRV lookup.""" + attempts = self._get_attempts(params) + if not attempts: + return params + + hps = [] + for hp in attempts: + if hp.totry: + hps.extend(self._resolve_srv(hp)) + else: + hps.append(hp) + + return self._return_params(params, hps) + + async def resolve_async(self, params: Dict[str, Any]) -> Dict[str, Any]: + """Update the parameters host and port after SRV lookup.""" + attempts = self._get_attempts(params) + if not attempts: + return params + + hps = [] + for hp in attempts: + if hp.totry: + hps.extend(await self._resolve_srv_async(hp)) + else: + hps.append(hp) + + return self._return_params(params, hps) + + def _get_attempts(self, params: Dict[str, Any]) -> List[HostPort]: + """ + Return the list of host, and for each host if SRV lookup must be tried. + + Return an empty list if no lookup is requested. + """ + # If hostaddr is defined don't do any resolution. 
+ if params.get("hostaddr", os.environ.get("PGHOSTADDR", "")): + return [] + + host_arg: str = params.get("host", os.environ.get("PGHOST", "")) + hosts_in = host_arg.split(",") + port_arg: str = str(params.get("port", os.environ.get("PGPORT", ""))) + ports_in = port_arg.split(",") + + if len(ports_in) == 1: + # If only one port is specified, it applies to all the hosts. + ports_in *= len(hosts_in) + if len(ports_in) != len(hosts_in): + # ProgrammingError would have been more appropriate, but this is + # what the raise if the libpq fails connect in the same case. + raise e.OperationalError( + f"cannot match {len(hosts_in)} hosts with {len(ports_in)} port numbers" + ) + + out = [] + srv_found = False + for host, port in zip(hosts_in, ports_in): + m = self.re_srv_rr.match(host) + if m or port.lower() == "srv": + srv_found = True + target = m.group("target") if m else None + hp = HostPort(host=host, port=port, totry=True, target=target) + else: + hp = HostPort(host=host, port=port) + out.append(hp) + + return out if srv_found else [] + + def _resolve_srv(self, hp: HostPort) -> List[HostPort]: + try: + ans = resolver.resolve(hp.host, "SRV") + except DNSException: + ans = () + return self._get_solved_entries(hp, ans) + + async def _resolve_srv_async(self, hp: HostPort) -> List[HostPort]: + try: + ans = await async_resolver.resolve(hp.host, "SRV") + except DNSException: + ans = () + return self._get_solved_entries(hp, ans) + + def _get_solved_entries( + self, hp: HostPort, entries: "Sequence[SRV]" + ) -> List[HostPort]: + if not entries: + # No SRV entry found. Delegate the libpq a QNAME=target lookup + if hp.target and hp.port.lower() != "srv": + return [HostPort(host=hp.target, port=hp.port)] + else: + return [] + + # If there is precisely one SRV RR, and its Target is "." (the root + # domain), abort. 
+ if len(entries) == 1 and str(entries[0].target) == ".": + return [] + + return [ + HostPort(host=str(entry.target).rstrip("."), port=str(entry.port)) + for entry in self.sort_rfc2782(entries) + ] + + def _return_params( + self, params: Dict[str, Any], hps: List[HostPort] + ) -> Dict[str, Any]: + if not hps: + # Nothing found, we ended up with an empty list + raise e.OperationalError("no host found after SRV RR lookup") + + out = params.copy() + out["host"] = ",".join(hp.host for hp in hps) + out["port"] = ",".join(str(hp.port) for hp in hps) + return out + + def sort_rfc2782(self, ans: "Sequence[SRV]") -> "List[SRV]": + """ + Implement the priority/weight ordering defined in RFC 2782. + """ + # Divide the entries by priority: + priorities: DefaultDict[int, "List[SRV]"] = defaultdict(list) + out: "List[SRV]" = [] + for entry in ans: + priorities[entry.priority].append(entry) + + for pri, entries in sorted(priorities.items()): + if len(entries) == 1: + out.append(entries[0]) + continue + + entries.sort(key=lambda ent: ent.weight) + total_weight = sum(ent.weight for ent in entries) + while entries: + r = randint(0, total_weight) + csum = 0 + for i, ent in enumerate(entries): + csum += ent.weight + if csum >= r: + break + out.append(ent) + total_weight -= ent.weight + del entries[i] + + return out diff --git a/lib/python3.11/site-packages/psycopg/_encodings.py b/lib/python3.11/site-packages/psycopg/_encodings.py new file mode 100644 index 0000000..876acb9 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/_encodings.py @@ -0,0 +1,170 @@ +""" +Mappings between PostgreSQL and Python encodings. 
+""" + +# Copyright (C) 2020 The Psycopg Team + +import re +import string +import codecs +from typing import Any, Dict, Optional, TYPE_CHECKING + +from .pq._enums import ConnStatus +from .errors import NotSupportedError +from ._compat import cache + +if TYPE_CHECKING: + from .pq.abc import PGconn + from .connection import BaseConnection + +OK = ConnStatus.OK + + +_py_codecs = { + "BIG5": "big5", + "EUC_CN": "gb2312", + "EUC_JIS_2004": "euc_jis_2004", + "EUC_JP": "euc_jp", + "EUC_KR": "euc_kr", + # "EUC_TW": not available in Python + "GB18030": "gb18030", + "GBK": "gbk", + "ISO_8859_5": "iso8859-5", + "ISO_8859_6": "iso8859-6", + "ISO_8859_7": "iso8859-7", + "ISO_8859_8": "iso8859-8", + "JOHAB": "johab", + "KOI8R": "koi8-r", + "KOI8U": "koi8-u", + "LATIN1": "iso8859-1", + "LATIN10": "iso8859-16", + "LATIN2": "iso8859-2", + "LATIN3": "iso8859-3", + "LATIN4": "iso8859-4", + "LATIN5": "iso8859-9", + "LATIN6": "iso8859-10", + "LATIN7": "iso8859-13", + "LATIN8": "iso8859-14", + "LATIN9": "iso8859-15", + # "MULE_INTERNAL": not available in Python + "SHIFT_JIS_2004": "shift_jis_2004", + "SJIS": "shift_jis", + # this actually means no encoding, see PostgreSQL docs + # it is special-cased by the text loader. + "SQL_ASCII": "ascii", + "UHC": "cp949", + "UTF8": "utf-8", + "WIN1250": "cp1250", + "WIN1251": "cp1251", + "WIN1252": "cp1252", + "WIN1253": "cp1253", + "WIN1254": "cp1254", + "WIN1255": "cp1255", + "WIN1256": "cp1256", + "WIN1257": "cp1257", + "WIN1258": "cp1258", + "WIN866": "cp866", + "WIN874": "cp874", +} + +py_codecs: Dict[bytes, str] = {} +py_codecs.update((k.encode(), v) for k, v in _py_codecs.items()) + +# Add an alias without underscore, for lenient lookups +py_codecs.update( + (k.replace("_", "").encode(), v) for k, v in _py_codecs.items() if "_" in k +) + +pg_codecs = {v: k.encode() for k, v in _py_codecs.items()} + + +def conn_encoding(conn: "Optional[BaseConnection[Any]]") -> str: + """ + Return the Python encoding name of a psycopg connection. 
+ + Default to utf8 if the connection has no encoding info. + """ + if not conn or conn.closed: + return "utf-8" + + pgenc = conn.pgconn.parameter_status(b"client_encoding") or b"UTF8" + return pg2pyenc(pgenc) + + +def pgconn_encoding(pgconn: "PGconn") -> str: + """ + Return the Python encoding name of a libpq connection. + + Default to utf8 if the connection has no encoding info. + """ + if pgconn.status != OK: + return "utf-8" + + pgenc = pgconn.parameter_status(b"client_encoding") or b"UTF8" + return pg2pyenc(pgenc) + + +def conninfo_encoding(conninfo: str) -> str: + """ + Return the Python encoding name passed in a conninfo string. Default to utf8. + + Because the input is likely to come from the user and not normalised by the + server, be somewhat lenient (non-case-sensitive lookup, ignore noise chars). + """ + from .conninfo import conninfo_to_dict + + params = conninfo_to_dict(conninfo) + pgenc = params.get("client_encoding") + if pgenc: + try: + return pg2pyenc(pgenc.encode()) + except NotSupportedError: + pass + + return "utf-8" + + +@cache +def py2pgenc(name: str) -> bytes: + """Convert a Python encoding name to PostgreSQL encoding name. + + Raise LookupError if the Python encoding is unknown. + """ + return pg_codecs[codecs.lookup(name).name] + + +@cache +def pg2pyenc(name: bytes) -> str: + """Convert a PostgreSQL encoding name to Python encoding name. + + Raise NotSupportedError if the PostgreSQL encoding is not supported by + Python. + """ + try: + return py_codecs[name.replace(b"-", b"").replace(b"_", b"").upper()] + except KeyError: + sname = name.decode("utf8", "replace") + raise NotSupportedError(f"codec not available in Python: {sname!r}") + + +def _as_python_identifier(s: str, prefix: str = "f") -> str: + """ + Reduce a string to a valid Python identifier. + + Replace all non-valid chars with '_' and prefix the value with `!prefix` if + the first letter is an '_'. 
+ """ + if not s.isidentifier(): + if s[0] in "1234567890": + s = prefix + s + if not s.isidentifier(): + s = _re_clean.sub("_", s) + # namedtuple fields cannot start with underscore. So... + if s[0] == "_": + s = prefix + s + return s + + +_re_clean = re.compile( + f"[^{string.ascii_lowercase}{string.ascii_uppercase}{string.digits}_]" +) diff --git a/lib/python3.11/site-packages/psycopg/_enums.py b/lib/python3.11/site-packages/psycopg/_enums.py new file mode 100644 index 0000000..a7cb78d --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/_enums.py @@ -0,0 +1,79 @@ +""" +Enum values for psycopg + +These values are defined by us and are not necessarily dependent on +libpq-defined enums. +""" + +# Copyright (C) 2020 The Psycopg Team + +from enum import Enum, IntEnum +from selectors import EVENT_READ, EVENT_WRITE + +from . import pq + + +class Wait(IntEnum): + R = EVENT_READ + W = EVENT_WRITE + RW = EVENT_READ | EVENT_WRITE + + +class Ready(IntEnum): + R = EVENT_READ + W = EVENT_WRITE + RW = EVENT_READ | EVENT_WRITE + + +class PyFormat(str, Enum): + """ + Enum representing the format wanted for a query argument. + + The value `AUTO` allows psycopg to choose the best format for a certain + parameter. + """ + + __module__ = "psycopg.adapt" + + AUTO = "s" + """Automatically chosen (``%s`` placeholder).""" + TEXT = "t" + """Text parameter (``%t`` placeholder).""" + BINARY = "b" + """Binary parameter (``%b`` placeholder).""" + + @classmethod + def from_pq(cls, fmt: pq.Format) -> "PyFormat": + return _pg2py[fmt] + + @classmethod + def as_pq(cls, fmt: "PyFormat") -> pq.Format: + return _py2pg[fmt] + + +class IsolationLevel(IntEnum): + """ + Enum representing the isolation level for a transaction. 
+ """ + + __module__ = "psycopg" + + READ_UNCOMMITTED = 1 + """:sql:`READ UNCOMMITTED` isolation level.""" + READ_COMMITTED = 2 + """:sql:`READ COMMITTED` isolation level.""" + REPEATABLE_READ = 3 + """:sql:`REPEATABLE READ` isolation level.""" + SERIALIZABLE = 4 + """:sql:`SERIALIZABLE` isolation level.""" + + +_py2pg = { + PyFormat.TEXT: pq.Format.TEXT, + PyFormat.BINARY: pq.Format.BINARY, +} + +_pg2py = { + pq.Format.TEXT: PyFormat.TEXT, + pq.Format.BINARY: PyFormat.BINARY, +} diff --git a/lib/python3.11/site-packages/psycopg/_pipeline.py b/lib/python3.11/site-packages/psycopg/_pipeline.py new file mode 100644 index 0000000..ecd6f06 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/_pipeline.py @@ -0,0 +1,287 @@ +""" +commands pipeline management +""" + +# Copyright (C) 2021 The Psycopg Team + +import logging +from types import TracebackType +from typing import Any, List, Optional, Union, Tuple, Type, TypeVar, TYPE_CHECKING +from typing_extensions import TypeAlias + +from . import pq +from . 
import errors as e +from .abc import PipelineCommand, PQGen +from ._compat import Deque +from ._encodings import pgconn_encoding +from ._preparing import Key, Prepare +from .generators import pipeline_communicate, fetch_many, send + +if TYPE_CHECKING: + from .pq.abc import PGresult + from .cursor import BaseCursor + from .connection import BaseConnection, Connection + from .connection_async import AsyncConnection + + +PendingResult: TypeAlias = Union[ + None, Tuple["BaseCursor[Any, Any]", Optional[Tuple[Key, Prepare, bytes]]] +] + +FATAL_ERROR = pq.ExecStatus.FATAL_ERROR +PIPELINE_ABORTED = pq.ExecStatus.PIPELINE_ABORTED +BAD = pq.ConnStatus.BAD + +ACTIVE = pq.TransactionStatus.ACTIVE + +logger = logging.getLogger("psycopg") + + +class BasePipeline: + command_queue: Deque[PipelineCommand] + result_queue: Deque[PendingResult] + _is_supported: Optional[bool] = None + + def __init__(self, conn: "BaseConnection[Any]") -> None: + self._conn = conn + self.pgconn = conn.pgconn + self.command_queue = Deque[PipelineCommand]() + self.result_queue = Deque[PendingResult]() + self.level = 0 + + def __repr__(self) -> str: + cls = f"{self.__class__.__module__}.{self.__class__.__qualname__}" + info = pq.misc.connection_summary(self._conn.pgconn) + return f"<{cls} {info} at 0x{id(self):x}>" + + @property + def status(self) -> pq.PipelineStatus: + return pq.PipelineStatus(self.pgconn.pipeline_status) + + @classmethod + def is_supported(cls) -> bool: + """Return `!True` if the psycopg libpq wrapper supports pipeline mode.""" + if BasePipeline._is_supported is None: + BasePipeline._is_supported = not cls._not_supported_reason() + return BasePipeline._is_supported + + @classmethod + def _not_supported_reason(cls) -> str: + """Return the reason why the pipeline mode is not supported. + + Return an empty string if pipeline mode is supported. + """ + # Support only depends on the libpq functions available in the pq + # wrapper, not on the database version. 
+ if pq.version() < 140000: + return ( + f"libpq too old {pq.version()};" + " v14 or greater required for pipeline mode" + ) + + if pq.__build_version__ < 140000: + return ( + f"libpq too old: module built for {pq.__build_version__};" + " v14 or greater required for pipeline mode" + ) + + return "" + + def _enter_gen(self) -> PQGen[None]: + if not self.is_supported(): + raise e.NotSupportedError( + f"pipeline mode not supported: {self._not_supported_reason()}" + ) + if self.level == 0: + self.pgconn.enter_pipeline_mode() + elif self.command_queue or self.pgconn.transaction_status == ACTIVE: + # Nested pipeline case. + # Transaction might be ACTIVE when the pipeline uses an "implicit + # transaction", typically in autocommit mode. But when entering a + # Psycopg transaction(), we expect the IDLE state. By sync()-ing, + # we make sure all previous commands are completed and the + # transaction gets back to IDLE. + yield from self._sync_gen() + self.level += 1 + + def _exit(self, exc: Optional[BaseException]) -> None: + self.level -= 1 + if self.level == 0 and self.pgconn.status != BAD: + try: + self.pgconn.exit_pipeline_mode() + except e.OperationalError as exc2: + # Notice that this error might be pretty irrecoverable. It + # happens on COPY, for instance: even if sync succeeds, exiting + # fails with "cannot exit pipeline mode with uncollected results" + if exc: + logger.warning("error ignored exiting %r: %s", self, exc2) + else: + raise exc2.with_traceback(None) + + def _sync_gen(self) -> PQGen[None]: + self._enqueue_sync() + yield from self._communicate_gen() + yield from self._fetch_gen(flush=False) + + def _exit_gen(self) -> PQGen[None]: + """ + Exit current pipeline by sending a Sync and fetch back all remaining results. + """ + try: + self._enqueue_sync() + yield from self._communicate_gen() + finally: + # No need to force flush since we emitted a sync just before. 
+ yield from self._fetch_gen(flush=False) + + def _communicate_gen(self) -> PQGen[None]: + """Communicate with pipeline to send commands and possibly fetch + results, which are then processed. + """ + fetched = yield from pipeline_communicate(self.pgconn, self.command_queue) + to_process = [(self.result_queue.popleft(), results) for results in fetched] + for queued, results in to_process: + self._process_results(queued, results) + + def _fetch_gen(self, *, flush: bool) -> PQGen[None]: + """Fetch available results from the connection and process them with + pipeline queued items. + + If 'flush' is True, a PQsendFlushRequest() is issued in order to make + sure results can be fetched. Otherwise, the caller may emit a + PQpipelineSync() call to ensure the output buffer gets flushed before + fetching. + """ + if not self.result_queue: + return + + if flush: + self.pgconn.send_flush_request() + yield from send(self.pgconn) + + to_process = [] + while self.result_queue: + results = yield from fetch_many(self.pgconn) + if not results: + # No more results to fetch, but there may still be pending + # commands. + break + queued = self.result_queue.popleft() + to_process.append((queued, results)) + + for queued, results in to_process: + self._process_results(queued, results) + + def _process_results( + self, queued: PendingResult, results: List["PGresult"] + ) -> None: + """Process a results set fetched from the current pipeline. + + This matches 'results' with its respective element in the pipeline + queue. For commands (None value in the pipeline queue), results are + checked directly. For prepare statement creation requests, update the + cache. Otherwise, results are attached to their respective cursor. 
+ """ + if queued is None: + (result,) = results + if result.status == FATAL_ERROR: + raise e.error_from_result(result, encoding=pgconn_encoding(self.pgconn)) + elif result.status == PIPELINE_ABORTED: + raise e.PipelineAborted("pipeline aborted") + else: + cursor, prepinfo = queued + cursor._set_results_from_pipeline(results) + if prepinfo: + key, prep, name = prepinfo + # Update the prepare state of the query. + cursor._conn._prepared.validate(key, prep, name, results) + + def _enqueue_sync(self) -> None: + """Enqueue a PQpipelineSync() command.""" + self.command_queue.append(self.pgconn.pipeline_sync) + self.result_queue.append(None) + + +class Pipeline(BasePipeline): + """Handler for connection in pipeline mode.""" + + __module__ = "psycopg" + _conn: "Connection[Any]" + _Self = TypeVar("_Self", bound="Pipeline") + + def __init__(self, conn: "Connection[Any]") -> None: + super().__init__(conn) + + def sync(self) -> None: + """Sync the pipeline, send any pending command and receive and process + all available results. 
+ """ + try: + with self._conn.lock: + self._conn.wait(self._sync_gen()) + except e._NO_TRACEBACK as ex: + raise ex.with_traceback(None) + + def __enter__(self: _Self) -> _Self: + with self._conn.lock: + self._conn.wait(self._enter_gen()) + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + try: + with self._conn.lock: + self._conn.wait(self._exit_gen()) + except Exception as exc2: + # Don't clobber an exception raised in the block with this one + if exc_val: + logger.warning("error ignored terminating %r: %s", self, exc2) + else: + raise exc2.with_traceback(None) + finally: + self._exit(exc_val) + + +class AsyncPipeline(BasePipeline): + """Handler for async connection in pipeline mode.""" + + __module__ = "psycopg" + _conn: "AsyncConnection[Any]" + _Self = TypeVar("_Self", bound="AsyncPipeline") + + def __init__(self, conn: "AsyncConnection[Any]") -> None: + super().__init__(conn) + + async def sync(self) -> None: + try: + async with self._conn.lock: + await self._conn.wait(self._sync_gen()) + except e._NO_TRACEBACK as ex: + raise ex.with_traceback(None) + + async def __aenter__(self: _Self) -> _Self: + async with self._conn.lock: + await self._conn.wait(self._enter_gen()) + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + try: + async with self._conn.lock: + await self._conn.wait(self._exit_gen()) + except Exception as exc2: + # Don't clobber an exception raised in the block with this one + if exc_val: + logger.warning("error ignored terminating %r: %s", self, exc2) + else: + raise exc2.with_traceback(None) + finally: + self._exit(exc_val) diff --git a/lib/python3.11/site-packages/psycopg/_preparing.py b/lib/python3.11/site-packages/psycopg/_preparing.py new file mode 100644 index 0000000..f60c0cb --- /dev/null +++ 
b/lib/python3.11/site-packages/psycopg/_preparing.py @@ -0,0 +1,198 @@ +""" +Support for prepared statements +""" + +# Copyright (C) 2020 The Psycopg Team + +from enum import IntEnum, auto +from typing import Iterator, Optional, Sequence, Tuple, TYPE_CHECKING +from collections import OrderedDict +from typing_extensions import TypeAlias + +from . import pq +from ._compat import Deque +from ._queries import PostgresQuery + +if TYPE_CHECKING: + from .pq.abc import PGresult + +Key: TypeAlias = Tuple[bytes, Tuple[int, ...]] + +COMMAND_OK = pq.ExecStatus.COMMAND_OK +TUPLES_OK = pq.ExecStatus.TUPLES_OK + + +class Prepare(IntEnum): + NO = auto() + YES = auto() + SHOULD = auto() + + +class PrepareManager: + # Number of times a query is executed before it is prepared. + prepare_threshold: Optional[int] = 5 + + # Maximum number of prepared statements on the connection. + prepared_max: int = 100 + + def __init__(self) -> None: + # Map (query, types) to the number of times the query was seen. + self._counts: OrderedDict[Key, int] = OrderedDict() + + # Map (query, types) to the name of the statement if prepared. + self._names: OrderedDict[Key, bytes] = OrderedDict() + + # Counter to generate prepared statements names + self._prepared_idx = 0 + + self._maint_commands = Deque[bytes]() + + @staticmethod + def key(query: PostgresQuery) -> Key: + return (query.query, query.types) + + def get( + self, query: PostgresQuery, prepare: Optional[bool] = None + ) -> Tuple[Prepare, bytes]: + """ + Check if a query is prepared, tell back whether to prepare it. 
+ """ + if prepare is False or self.prepare_threshold is None: + # The user doesn't want this query to be prepared + return Prepare.NO, b"" + + key = self.key(query) + name = self._names.get(key) + if name: + # The query was already prepared in this session + return Prepare.YES, name + + count = self._counts.get(key, 0) + if count >= self.prepare_threshold or prepare: + # The query has been executed enough times and needs to be prepared + name = f"_pg3_{self._prepared_idx}".encode() + self._prepared_idx += 1 + return Prepare.SHOULD, name + else: + # The query is not to be prepared yet + return Prepare.NO, b"" + + def _should_discard(self, prep: Prepare, results: Sequence["PGresult"]) -> bool: + """Check if we need to discard our entire state: it should happen on + rollback or on dropping objects, because the same object may get + recreated and postgres would fail internal lookups. + """ + if self._names or prep == Prepare.SHOULD: + for result in results: + if result.status != COMMAND_OK: + continue + cmdstat = result.command_status + if cmdstat and (cmdstat.startswith(b"DROP ") or cmdstat == b"ROLLBACK"): + return self.clear() + return False + + @staticmethod + def _check_results(results: Sequence["PGresult"]) -> bool: + """Return False if 'results' are invalid for prepared statement cache.""" + if len(results) != 1: + # We cannot prepare a multiple statement + return False + + status = results[0].status + if COMMAND_OK != status != TUPLES_OK: + # We don't prepare failed queries or other weird results + return False + + return True + + def _rotate(self) -> None: + """Evict an old value from the cache. + + If it was prepared, deallocate it. Do it only once: if the cache was + resized, deallocate gradually. 
+ """ + if len(self._counts) > self.prepared_max: + self._counts.popitem(last=False) + + if len(self._names) > self.prepared_max: + name = self._names.popitem(last=False)[1] + self._maint_commands.append(b"DEALLOCATE " + name) + + def maybe_add_to_cache( + self, query: PostgresQuery, prep: Prepare, name: bytes + ) -> Optional[Key]: + """Handle 'query' for possible addition to the cache. + + If a new entry has been added, return its key. Return None otherwise + (meaning the query is already in cache or cache is not enabled). + + Note: This method is only called in pipeline mode. + """ + # don't do anything if prepared statements are disabled + if self.prepare_threshold is None: + return None + + key = self.key(query) + if key in self._counts: + if prep is Prepare.SHOULD: + del self._counts[key] + self._names[key] = name + else: + self._counts[key] += 1 + self._counts.move_to_end(key) + return None + + elif key in self._names: + self._names.move_to_end(key) + return None + + else: + if prep is Prepare.SHOULD: + self._names[key] = name + else: + self._counts[key] = 1 + return key + + def validate( + self, + key: Key, + prep: Prepare, + name: bytes, + results: Sequence["PGresult"], + ) -> None: + """Validate cached entry with 'key' by checking query 'results'. + + Possibly return a command to perform maintenance on database side. + + Note: this method is only called in pipeline mode. + """ + if self._should_discard(prep, results): + return + + if not self._check_results(results): + self._names.pop(key, None) + self._counts.pop(key, None) + else: + self._rotate() + + def clear(self) -> bool: + """Clear the cache of the maintenance commands. + + Clear the internal state and prepare a command to clear the state of + the server. 
+ """ + self._counts.clear() + if self._names: + self._names.clear() + self._maint_commands.clear() + self._maint_commands.append(b"DEALLOCATE ALL") + return True + else: + return False + + def get_maintenance_commands(self) -> Iterator[bytes]: + """ + Iterate over the commands needed to align the server state to our state + """ + while self._maint_commands: + yield self._maint_commands.popleft() diff --git a/lib/python3.11/site-packages/psycopg/_queries.py b/lib/python3.11/site-packages/psycopg/_queries.py new file mode 100644 index 0000000..2a7554c --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/_queries.py @@ -0,0 +1,375 @@ +""" +Utility module to manipulate queries +""" + +# Copyright (C) 2020 The Psycopg Team + +import re +from typing import Any, Dict, List, Mapping, Match, NamedTuple, Optional +from typing import Sequence, Tuple, Union, TYPE_CHECKING +from functools import lru_cache + +from . import pq +from . import errors as e +from .sql import Composable +from .abc import Buffer, Query, Params +from ._enums import PyFormat +from ._encodings import conn_encoding + +if TYPE_CHECKING: + from .abc import Transformer + + +class QueryPart(NamedTuple): + pre: bytes + item: Union[int, str] + format: PyFormat + + +class PostgresQuery: + """ + Helper to convert a Python query and parameters into Postgres format. + """ + + __slots__ = """ + query params types formats + _tx _want_formats _parts _encoding _order + """.split() + + def __init__(self, transformer: "Transformer"): + self._tx = transformer + + self.params: Optional[Sequence[Optional[Buffer]]] = None + # these are tuples so they can be used as keys e.g. in prepared stmts + self.types: Tuple[int, ...] 
= () + + # The format requested by the user and the ones to really pass Postgres + self._want_formats: Optional[List[PyFormat]] = None + self.formats: Optional[Sequence[pq.Format]] = None + + self._encoding = conn_encoding(transformer.connection) + self._parts: List[QueryPart] + self.query = b"" + self._order: Optional[List[str]] = None + + def convert(self, query: Query, vars: Optional[Params]) -> None: + """ + Set up the query and parameters to convert. + + The results of this function can be obtained accessing the object + attributes (`query`, `params`, `types`, `formats`). + """ + if isinstance(query, str): + bquery = query.encode(self._encoding) + elif isinstance(query, Composable): + bquery = query.as_bytes(self._tx) + else: + bquery = query + + if vars is not None: + ( + self.query, + self._want_formats, + self._order, + self._parts, + ) = _query2pg(bquery, self._encoding) + else: + self.query = bquery + self._want_formats = self._order = None + + self.dump(vars) + + def dump(self, vars: Optional[Params]) -> None: + """ + Process a new set of variables on the query processed by `convert()`. + + This method updates `params` and `types`. + """ + if vars is not None: + params = _validate_and_reorder_params(self._parts, vars, self._order) + assert self._want_formats is not None + self.params = self._tx.dump_sequence(params, self._want_formats) + self.types = self._tx.types or () + self.formats = self._tx.formats + else: + self.params = None + self.types = () + self.formats = None + + +class PostgresClientQuery(PostgresQuery): + """ + PostgresQuery subclass merging query and arguments client-side. + """ + + __slots__ = ("template",) + + def convert(self, query: Query, vars: Optional[Params]) -> None: + """ + Set up the query and parameters to convert. + + The results of this function can be obtained accessing the object + attributes (`query`, `params`, `types`, `formats`). 
+ """ + if isinstance(query, str): + bquery = query.encode(self._encoding) + elif isinstance(query, Composable): + bquery = query.as_bytes(self._tx) + else: + bquery = query + + if vars is not None: + (self.template, self._order, self._parts) = _query2pg_client( + bquery, self._encoding + ) + else: + self.query = bquery + self._order = None + + self.dump(vars) + + def dump(self, vars: Optional[Params]) -> None: + """ + Process a new set of variables on the query processed by `convert()`. + + This method updates `params` and `types`. + """ + if vars is not None: + params = _validate_and_reorder_params(self._parts, vars, self._order) + self.params = tuple( + self._tx.as_literal(p) if p is not None else b"NULL" for p in params + ) + self.query = self.template % self.params + else: + self.params = None + + +@lru_cache() +def _query2pg( + query: bytes, encoding: str +) -> Tuple[bytes, List[PyFormat], Optional[List[str]], List[QueryPart]]: + """ + Convert Python query and params into something Postgres understands. + + - Convert Python placeholders (``%s``, ``%(name)s``) into Postgres + format (``$1``, ``$2``) + - placeholders can be %s, %t, or %b (auto, text or binary) + - return ``query`` (bytes), ``formats`` (list of formats) ``order`` + (sequence of names used in the query, in the position they appear) + ``parts`` (splits of queries and placeholders). 
+ """ + parts = _split_query(query, encoding) + order: Optional[List[str]] = None + chunks: List[bytes] = [] + formats = [] + + if isinstance(parts[0].item, int): + for part in parts[:-1]: + assert isinstance(part.item, int) + chunks.append(part.pre) + chunks.append(b"$%d" % (part.item + 1)) + formats.append(part.format) + + elif isinstance(parts[0].item, str): + seen: Dict[str, Tuple[bytes, PyFormat]] = {} + order = [] + for part in parts[:-1]: + assert isinstance(part.item, str) + chunks.append(part.pre) + if part.item not in seen: + ph = b"$%d" % (len(seen) + 1) + seen[part.item] = (ph, part.format) + order.append(part.item) + chunks.append(ph) + formats.append(part.format) + else: + if seen[part.item][1] != part.format: + raise e.ProgrammingError( + f"placeholder '{part.item}' cannot have different formats" + ) + chunks.append(seen[part.item][0]) + + # last part + chunks.append(parts[-1].pre) + + return b"".join(chunks), formats, order, parts + + +@lru_cache() +def _query2pg_client( + query: bytes, encoding: str +) -> Tuple[bytes, Optional[List[str]], List[QueryPart]]: + """ + Convert Python query and params into a template to perform client-side binding + """ + parts = _split_query(query, encoding, collapse_double_percent=False) + order: Optional[List[str]] = None + chunks: List[bytes] = [] + + if isinstance(parts[0].item, int): + for part in parts[:-1]: + assert isinstance(part.item, int) + chunks.append(part.pre) + chunks.append(b"%s") + + elif isinstance(parts[0].item, str): + seen: Dict[str, Tuple[bytes, PyFormat]] = {} + order = [] + for part in parts[:-1]: + assert isinstance(part.item, str) + chunks.append(part.pre) + if part.item not in seen: + ph = b"%s" + seen[part.item] = (ph, part.format) + order.append(part.item) + chunks.append(ph) + else: + chunks.append(seen[part.item][0]) + order.append(part.item) + + # last part + chunks.append(parts[-1].pre) + + return b"".join(chunks), order, parts + + +def _validate_and_reorder_params( + parts: 
List[QueryPart], vars: Params, order: Optional[List[str]] +) -> Sequence[Any]: + """ + Verify the compatibility between a query and a set of params. + """ + # Try concrete types, then abstract types + t = type(vars) + if t is list or t is tuple: + sequence = True + elif t is dict: + sequence = False + elif isinstance(vars, Sequence) and not isinstance(vars, (bytes, str)): + sequence = True + elif isinstance(vars, Mapping): + sequence = False + else: + raise TypeError( + "query parameters should be a sequence or a mapping," + f" got {type(vars).__name__}" + ) + + if sequence: + if len(vars) != len(parts) - 1: + raise e.ProgrammingError( + f"the query has {len(parts) - 1} placeholders but" + f" {len(vars)} parameters were passed" + ) + if vars and not isinstance(parts[0].item, int): + raise TypeError("named placeholders require a mapping of parameters") + return vars # type: ignore[return-value] + + else: + if vars and len(parts) > 1 and not isinstance(parts[0][1], str): + raise TypeError( + "positional placeholders (%s) require a sequence of parameters" + ) + try: + return [vars[item] for item in order or ()] # type: ignore[call-overload] + except KeyError: + raise e.ProgrammingError( + "query parameter missing:" + f" {', '.join(sorted(i for i in order or () if i not in vars))}" + ) + + +_re_placeholder = re.compile( + rb"""(?x) + % # a literal % + (?: + (?: + \( ([^)]+) \) # or a name in (braces) + . # followed by a format + ) + | + (?:.) 
# or any char, really + ) + """ +) + + +def _split_query( + query: bytes, encoding: str = "ascii", collapse_double_percent: bool = True +) -> List[QueryPart]: + parts: List[Tuple[bytes, Optional[Match[bytes]]]] = [] + cur = 0 + + # pairs [(fragment, match], with the last match None + m = None + for m in _re_placeholder.finditer(query): + pre = query[cur : m.span(0)[0]] + parts.append((pre, m)) + cur = m.span(0)[1] + if m: + parts.append((query[cur:], None)) + else: + parts.append((query, None)) + + rv = [] + + # drop the "%%", validate + i = 0 + phtype = None + while i < len(parts): + pre, m = parts[i] + if m is None: + # last part + rv.append(QueryPart(pre, 0, PyFormat.AUTO)) + break + + ph = m.group(0) + if ph == b"%%": + # unescape '%%' to '%' if necessary, then merge the parts + if collapse_double_percent: + ph = b"%" + pre1, m1 = parts[i + 1] + parts[i + 1] = (pre + ph + pre1, m1) + del parts[i] + continue + + if ph == b"%(": + raise e.ProgrammingError( + "incomplete placeholder:" + f" '{query[m.span(0)[0]:].split()[0].decode(encoding)}'" + ) + elif ph == b"% ": + # explicit messasge for a typical error + raise e.ProgrammingError( + "incomplete placeholder: '%'; if you want to use '%' as an" + " operator you can double it up, i.e. 
use '%%'" + ) + elif ph[-1:] not in b"sbt": + raise e.ProgrammingError( + "only '%s', '%b', '%t' are allowed as placeholders, got" + f" '{m.group(0).decode(encoding)}'" + ) + + # Index or name + item: Union[int, str] + item = m.group(1).decode(encoding) if m.group(1) else i + + if not phtype: + phtype = type(item) + elif phtype is not type(item): + raise e.ProgrammingError( + "positional and named placeholders cannot be mixed" + ) + + format = _ph_to_fmt[ph[-1:]] + rv.append(QueryPart(pre, item, format)) + i += 1 + + return rv + + +_ph_to_fmt = { + b"s": PyFormat.AUTO, + b"t": PyFormat.TEXT, + b"b": PyFormat.BINARY, +} diff --git a/lib/python3.11/site-packages/psycopg/_struct.py b/lib/python3.11/site-packages/psycopg/_struct.py new file mode 100644 index 0000000..28a6084 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/_struct.py @@ -0,0 +1,57 @@ +""" +Utility functions to deal with binary structs. +""" + +# Copyright (C) 2020 The Psycopg Team + +import struct +from typing import Callable, cast, Optional, Tuple +from typing_extensions import TypeAlias + +from .abc import Buffer +from . import errors as e +from ._compat import Protocol + +PackInt: TypeAlias = Callable[[int], bytes] +UnpackInt: TypeAlias = Callable[[Buffer], Tuple[int]] +PackFloat: TypeAlias = Callable[[float], bytes] +UnpackFloat: TypeAlias = Callable[[Buffer], Tuple[float]] + + +class UnpackLen(Protocol): + def __call__(self, data: Buffer, start: Optional[int]) -> Tuple[int]: + ... 
+ + +pack_int2 = cast(PackInt, struct.Struct("!h").pack) +pack_uint2 = cast(PackInt, struct.Struct("!H").pack) +pack_int4 = cast(PackInt, struct.Struct("!i").pack) +pack_uint4 = cast(PackInt, struct.Struct("!I").pack) +pack_int8 = cast(PackInt, struct.Struct("!q").pack) +pack_float4 = cast(PackFloat, struct.Struct("!f").pack) +pack_float8 = cast(PackFloat, struct.Struct("!d").pack) + +unpack_int2 = cast(UnpackInt, struct.Struct("!h").unpack) +unpack_uint2 = cast(UnpackInt, struct.Struct("!H").unpack) +unpack_int4 = cast(UnpackInt, struct.Struct("!i").unpack) +unpack_uint4 = cast(UnpackInt, struct.Struct("!I").unpack) +unpack_int8 = cast(UnpackInt, struct.Struct("!q").unpack) +unpack_float4 = cast(UnpackFloat, struct.Struct("!f").unpack) +unpack_float8 = cast(UnpackFloat, struct.Struct("!d").unpack) + +_struct_len = struct.Struct("!i") +pack_len = cast(Callable[[int], bytes], _struct_len.pack) +unpack_len = cast(UnpackLen, _struct_len.unpack_from) + + +def pack_float4_bug_304(x: float) -> bytes: + raise e.InterfaceError( + "cannot dump Float4: Python affected by bug #304. Note that the psycopg-c" + " and psycopg-binary packages are not affected by this issue." + " See https://github.com/psycopg/psycopg/issues/304" + ) + + +# If issue #304 is detected, raise an error instead of dumping wrong data. 
+if struct.Struct("!f").pack(1.0) != bytes.fromhex("3f800000"): + pack_float4 = pack_float4_bug_304 diff --git a/lib/python3.11/site-packages/psycopg/_tpc.py b/lib/python3.11/site-packages/psycopg/_tpc.py new file mode 100644 index 0000000..3528188 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/_tpc.py @@ -0,0 +1,116 @@ +""" +psycopg two-phase commit support +""" + +# Copyright (C) 2021 The Psycopg Team + +import re +import datetime as dt +from base64 import b64encode, b64decode +from typing import Optional, Union +from dataclasses import dataclass, replace + +_re_xid = re.compile(r"^(\d+)_([^_]*)_([^_]*)$") + + +@dataclass(frozen=True) +class Xid: + """A two-phase commit transaction identifier. + + The object can also be unpacked as a 3-item tuple (`format_id`, `gtrid`, + `bqual`). + + """ + + format_id: Optional[int] + gtrid: str + bqual: Optional[str] + prepared: Optional[dt.datetime] = None + owner: Optional[str] = None + database: Optional[str] = None + + @classmethod + def from_string(cls, s: str) -> "Xid": + """Try to parse an XA triple from the string. + + This may fail for several reasons. In such case return an unparsed Xid. 
+ """ + try: + return cls._parse_string(s) + except Exception: + return Xid(None, s, None) + + def __str__(self) -> str: + return self._as_tid() + + def __len__(self) -> int: + return 3 + + def __getitem__(self, index: int) -> Union[int, str, None]: + return (self.format_id, self.gtrid, self.bqual)[index] + + @classmethod + def _parse_string(cls, s: str) -> "Xid": + m = _re_xid.match(s) + if not m: + raise ValueError("bad Xid format") + + format_id = int(m.group(1)) + gtrid = b64decode(m.group(2)).decode() + bqual = b64decode(m.group(3)).decode() + return cls.from_parts(format_id, gtrid, bqual) + + @classmethod + def from_parts( + cls, format_id: Optional[int], gtrid: str, bqual: Optional[str] + ) -> "Xid": + if format_id is not None: + if bqual is None: + raise TypeError("if format_id is specified, bqual must be too") + if not 0 <= format_id < 0x80000000: + raise ValueError("format_id must be a non-negative 32-bit integer") + if len(bqual) > 64: + raise ValueError("bqual must be not longer than 64 chars") + if len(gtrid) > 64: + raise ValueError("gtrid must be not longer than 64 chars") + + elif bqual is None: + raise TypeError("if format_id is None, bqual must be None too") + + return Xid(format_id, gtrid, bqual) + + def _as_tid(self) -> str: + """ + Return the PostgreSQL transaction_id for this XA xid. + + PostgreSQL wants just a string, while the DBAPI supports the XA + standard and thus a triple. We use the same conversion algorithm + implemented by JDBC in order to allow some form of interoperation. + + see also: the pgjdbc implementation + http://cvs.pgfoundry.org/cgi-bin/cvsweb.cgi/jdbc/pgjdbc/org/ + postgresql/xa/RecoveredXid.java?rev=1.2 + """ + if self.format_id is None or self.bqual is None: + # Unparsed xid: return the gtrid. + return self.gtrid + + # XA xid: mash together the components. 
+ egtrid = b64encode(self.gtrid.encode()).decode() + ebqual = b64encode(self.bqual.encode()).decode() + + return f"{self.format_id}_{egtrid}_{ebqual}" + + @classmethod + def _get_recover_query(cls) -> str: + return "SELECT gid, prepared, owner, database FROM pg_prepared_xacts" + + @classmethod + def _from_record( + cls, gid: str, prepared: dt.datetime, owner: str, database: str + ) -> "Xid": + xid = Xid.from_string(gid) + return replace(xid, prepared=prepared, owner=owner, database=database) + + +Xid.__module__ = "psycopg" diff --git a/lib/python3.11/site-packages/psycopg/_transform.py b/lib/python3.11/site-packages/psycopg/_transform.py new file mode 100644 index 0000000..19bd6ae --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/_transform.py @@ -0,0 +1,350 @@ +""" +Helper object to transform values between Python and PostgreSQL +""" + +# Copyright (C) 2020 The Psycopg Team + +from typing import Any, Dict, List, Optional, Sequence, Tuple +from typing import DefaultDict, TYPE_CHECKING +from collections import defaultdict +from typing_extensions import TypeAlias + +from . import pq +from . import postgres +from . import errors as e +from .abc import Buffer, LoadFunc, AdaptContext, PyFormat, DumperKey, NoneType +from .rows import Row, RowMaker +from .postgres import INVALID_OID, TEXT_OID +from ._encodings import pgconn_encoding + +if TYPE_CHECKING: + from .abc import Dumper, Loader + from .adapt import AdaptersMap + from .pq.abc import PGresult + from .connection import BaseConnection + +DumperCache: TypeAlias = Dict[DumperKey, "Dumper"] +OidDumperCache: TypeAlias = Dict[int, "Dumper"] +LoaderCache: TypeAlias = Dict[int, "Loader"] + +TEXT = pq.Format.TEXT +PY_TEXT = PyFormat.TEXT + + +class Transformer(AdaptContext): + """ + An object that can adapt efficiently between Python and PostgreSQL. + + The life cycle of the object is the query, so it is assumed that attributes + such as the server version or the connection encoding will not change. 
The + object have its state so adapting several values of the same type can be + optimised. + + """ + + __module__ = "psycopg.adapt" + + __slots__ = """ + types formats + _conn _adapters _pgresult _dumpers _loaders _encoding _none_oid + _oid_dumpers _oid_types _row_dumpers _row_loaders + """.split() + + types: Optional[Tuple[int, ...]] + formats: Optional[List[pq.Format]] + + _adapters: "AdaptersMap" + _pgresult: Optional["PGresult"] + _none_oid: int + + def __init__(self, context: Optional[AdaptContext] = None): + self._pgresult = self.types = self.formats = None + + # WARNING: don't store context, or you'll create a loop with the Cursor + if context: + self._adapters = context.adapters + self._conn = context.connection + else: + self._adapters = postgres.adapters + self._conn = None + + # mapping fmt, class -> Dumper instance + self._dumpers: DefaultDict[PyFormat, DumperCache] + self._dumpers = defaultdict(dict) + + # mapping fmt, oid -> Dumper instance + # Not often used, so create it only if needed. + self._oid_dumpers: Optional[Tuple[OidDumperCache, OidDumperCache]] + self._oid_dumpers = None + + # mapping fmt, oid -> Loader instance + self._loaders: Tuple[LoaderCache, LoaderCache] = ({}, {}) + + self._row_dumpers: Optional[List["Dumper"]] = None + + # sequence of load functions from value to python + # the length of the result columns + self._row_loaders: List[LoadFunc] = [] + + # mapping oid -> type sql representation + self._oid_types: Dict[int, bytes] = {} + + self._encoding = "" + + @classmethod + def from_context(cls, context: Optional[AdaptContext]) -> "Transformer": + """ + Return a Transformer from an AdaptContext. + + If the context is a Transformer instance, just return it. 
+ """ + if isinstance(context, Transformer): + return context + else: + return cls(context) + + @property + def connection(self) -> Optional["BaseConnection[Any]"]: + return self._conn + + @property + def encoding(self) -> str: + if not self._encoding: + conn = self.connection + self._encoding = pgconn_encoding(conn.pgconn) if conn else "utf-8" + return self._encoding + + @property + def adapters(self) -> "AdaptersMap": + return self._adapters + + @property + def pgresult(self) -> Optional["PGresult"]: + return self._pgresult + + def set_pgresult( + self, + result: Optional["PGresult"], + *, + set_loaders: bool = True, + format: Optional[pq.Format] = None, + ) -> None: + self._pgresult = result + + if not result: + self._nfields = self._ntuples = 0 + if set_loaders: + self._row_loaders = [] + return + + self._ntuples = result.ntuples + nf = self._nfields = result.nfields + + if not set_loaders: + return + + if not nf: + self._row_loaders = [] + return + + fmt: pq.Format + fmt = result.fformat(0) if format is None else format # type: ignore + self._row_loaders = [ + self.get_loader(result.ftype(i), fmt).load for i in range(nf) + ] + + def set_dumper_types(self, types: Sequence[int], format: pq.Format) -> None: + self._row_dumpers = [self.get_dumper_by_oid(oid, format) for oid in types] + self.types = tuple(types) + self.formats = [format] * len(types) + + def set_loader_types(self, types: Sequence[int], format: pq.Format) -> None: + self._row_loaders = [self.get_loader(oid, format).load for oid in types] + + def dump_sequence( + self, params: Sequence[Any], formats: Sequence[PyFormat] + ) -> Sequence[Optional[Buffer]]: + nparams = len(params) + out: List[Optional[Buffer]] = [None] * nparams + + # If we have dumpers, it means set_dumper_types had been called, in + # which case self.types and self.formats are set to sequences of the + # right size. 
+ if self._row_dumpers: + for i in range(nparams): + param = params[i] + if param is not None: + out[i] = self._row_dumpers[i].dump(param) + return out + + types = [self._get_none_oid()] * nparams + pqformats = [TEXT] * nparams + + for i in range(nparams): + param = params[i] + if param is None: + continue + dumper = self.get_dumper(param, formats[i]) + out[i] = dumper.dump(param) + types[i] = dumper.oid + pqformats[i] = dumper.format + + self.types = tuple(types) + self.formats = pqformats + + return out + + def as_literal(self, obj: Any) -> bytes: + dumper = self.get_dumper(obj, PY_TEXT) + rv = dumper.quote(obj) + # If the result is quoted, and the oid not unknown or text, + # add an explicit type cast. + # Check the last char because the first one might be 'E'. + oid = dumper.oid + if oid and rv and rv[-1] == b"'"[0] and oid != TEXT_OID: + try: + type_sql = self._oid_types[oid] + except KeyError: + ti = self.adapters.types.get(oid) + if ti: + if oid < 8192: + # builtin: prefer "timestamptz" to "timestamp with time zone" + type_sql = ti.name.encode(self.encoding) + else: + type_sql = ti.regtype.encode(self.encoding) + if oid == ti.array_oid: + type_sql += b"[]" + else: + type_sql = b"" + self._oid_types[oid] = type_sql + + if type_sql: + rv = b"%s::%s" % (rv, type_sql) + + if not isinstance(rv, bytes): + rv = bytes(rv) + return rv + + def get_dumper(self, obj: Any, format: PyFormat) -> "Dumper": + """ + Return a Dumper instance to dump `!obj`. + """ + # Normally, the type of the object dictates how to dump it + key = type(obj) + + # Reuse an existing Dumper class for objects of the same type + cache = self._dumpers[format] + try: + dumper = cache[key] + except KeyError: + # If it's the first time we see this type, look for a dumper + # configured for it. 
+ dcls = self.adapters.get_dumper(key, format) + cache[key] = dumper = dcls(key, self) + + # Check if the dumper requires an upgrade to handle this specific value + key1 = dumper.get_key(obj, format) + if key1 is key: + return dumper + + # If it does, ask the dumper to create its own upgraded version + try: + return cache[key1] + except KeyError: + dumper = cache[key1] = dumper.upgrade(obj, format) + return dumper + + def _get_none_oid(self) -> int: + try: + return self._none_oid + except AttributeError: + pass + + try: + rv = self._none_oid = self._adapters.get_dumper(NoneType, PY_TEXT).oid + except KeyError: + raise e.InterfaceError("None dumper not found") + + return rv + + def get_dumper_by_oid(self, oid: int, format: pq.Format) -> "Dumper": + """ + Return a Dumper to dump an object to the type with given oid. + """ + if not self._oid_dumpers: + self._oid_dumpers = ({}, {}) + + # Reuse an existing Dumper class for objects of the same type + cache = self._oid_dumpers[format] + try: + return cache[oid] + except KeyError: + # If it's the first time we see this type, look for a dumper + # configured for it. 
+ dcls = self.adapters.get_dumper_by_oid(oid, format) + cache[oid] = dumper = dcls(NoneType, self) + + return dumper + + def load_rows(self, row0: int, row1: int, make_row: RowMaker[Row]) -> List[Row]: + res = self._pgresult + if not res: + raise e.InterfaceError("result not set") + + if not (0 <= row0 <= self._ntuples and 0 <= row1 <= self._ntuples): + raise e.InterfaceError( + f"rows must be included between 0 and {self._ntuples}" + ) + + records = [] + for row in range(row0, row1): + record: List[Any] = [None] * self._nfields + for col in range(self._nfields): + val = res.get_value(row, col) + if val is not None: + record[col] = self._row_loaders[col](val) + records.append(make_row(record)) + + return records + + def load_row(self, row: int, make_row: RowMaker[Row]) -> Optional[Row]: + res = self._pgresult + if not res: + return None + + if not 0 <= row < self._ntuples: + return None + + record: List[Any] = [None] * self._nfields + for col in range(self._nfields): + val = res.get_value(row, col) + if val is not None: + record[col] = self._row_loaders[col](val) + + return make_row(record) + + def load_sequence(self, record: Sequence[Optional[Buffer]]) -> Tuple[Any, ...]: + if len(self._row_loaders) != len(record): + raise e.ProgrammingError( + f"cannot load sequence of {len(record)} items:" + f" {len(self._row_loaders)} loaders registered" + ) + + return tuple( + (self._row_loaders[i](val) if val is not None else None) + for i, val in enumerate(record) + ) + + def get_loader(self, oid: int, format: pq.Format) -> "Loader": + try: + return self._loaders[format][oid] + except KeyError: + pass + + loader_cls = self._adapters.get_loader(oid, format) + if not loader_cls: + loader_cls = self._adapters.get_loader(INVALID_OID, format) + if not loader_cls: + raise e.InterfaceError("unknown oid loader not found") + loader = self._loaders[format][oid] = loader_cls(oid, self) + return loader diff --git a/lib/python3.11/site-packages/psycopg/_typeinfo.py 
b/lib/python3.11/site-packages/psycopg/_typeinfo.py new file mode 100644 index 0000000..08c5e65 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/_typeinfo.py @@ -0,0 +1,500 @@ +""" +Information about PostgreSQL types + +These types allow to read information from the system catalog and provide +information to the adapters if needed. +""" + +# Copyright (C) 2020 The Psycopg Team +from enum import Enum +from typing import Any, Dict, Iterator, Optional, overload +from typing import Sequence, Tuple, Type, TypeVar, Union, TYPE_CHECKING +from typing_extensions import TypeAlias + +from . import errors as e +from .abc import AdaptContext, Query +from .rows import dict_row +from ._encodings import conn_encoding + +if TYPE_CHECKING: + from .connection import BaseConnection, Connection + from .connection_async import AsyncConnection + from .sql import Identifier, SQL + +T = TypeVar("T", bound="TypeInfo") +RegistryKey: TypeAlias = Union[str, int, Tuple[type, int]] + + +class TypeInfo: + """ + Hold information about a PostgreSQL base type. + """ + + __module__ = "psycopg.types" + + def __init__( + self, + name: str, + oid: int, + array_oid: int, + *, + regtype: str = "", + delimiter: str = ",", + ): + self.name = name + self.oid = oid + self.array_oid = array_oid + self.regtype = regtype or name + self.delimiter = delimiter + + def __repr__(self) -> str: + return ( + f"<{self.__class__.__qualname__}:" + f" {self.name} (oid: {self.oid}, array oid: {self.array_oid})>" + ) + + @overload + @classmethod + def fetch( + cls: Type[T], conn: "Connection[Any]", name: Union[str, "Identifier"] + ) -> Optional[T]: + ... + + @overload + @classmethod + async def fetch( + cls: Type[T], conn: "AsyncConnection[Any]", name: Union[str, "Identifier"] + ) -> Optional[T]: + ... 
+ + @classmethod + def fetch( + cls: Type[T], conn: "BaseConnection[Any]", name: Union[str, "Identifier"] + ) -> Any: + """Query a system catalog to read information about a type.""" + from .sql import Composable + from .connection import Connection + from .connection_async import AsyncConnection + + if isinstance(name, Composable): + name = name.as_string(conn) + + if isinstance(conn, Connection): + return cls._fetch(conn, name) + elif isinstance(conn, AsyncConnection): + return cls._fetch_async(conn, name) + else: + raise TypeError( + f"expected Connection or AsyncConnection, got {type(conn).__name__}" + ) + + @classmethod + def _fetch(cls: Type[T], conn: "Connection[Any]", name: str) -> Optional[T]: + # This might result in a nested transaction. What we want is to leave + # the function with the connection in the state we found (either idle + # or intrans) + try: + with conn.transaction(): + if conn_encoding(conn) == "ascii": + conn.execute("set local client_encoding to utf8") + with conn.cursor(row_factory=dict_row) as cur: + cur.execute(cls._get_info_query(conn), {"name": name}) + recs = cur.fetchall() + except e.UndefinedObject: + return None + + return cls._from_records(name, recs) + + @classmethod + async def _fetch_async( + cls: Type[T], conn: "AsyncConnection[Any]", name: str + ) -> Optional[T]: + try: + async with conn.transaction(): + if conn_encoding(conn) == "ascii": + await conn.execute("set local client_encoding to utf8") + async with conn.cursor(row_factory=dict_row) as cur: + await cur.execute(cls._get_info_query(conn), {"name": name}) + recs = await cur.fetchall() + except e.UndefinedObject: + return None + + return cls._from_records(name, recs) + + @classmethod + def _from_records( + cls: Type[T], name: str, recs: Sequence[Dict[str, Any]] + ) -> Optional[T]: + if len(recs) == 1: + return cls(**recs[0]) + elif not recs: + return None + else: + raise e.ProgrammingError(f"found {len(recs)} different types named {name}") + + def register(self, 
context: Optional[AdaptContext] = None) -> None: + """ + Register the type information, globally or in the specified `!context`. + """ + if context: + types = context.adapters.types + else: + from . import postgres + + types = postgres.types + + types.add(self) + + if self.array_oid: + from .types.array import register_array + + register_array(self, context) + + @classmethod + def _get_info_query(cls, conn: "BaseConnection[Any]") -> Query: + from .sql import SQL + + return SQL( + """\ +SELECT + typname AS name, oid, typarray AS array_oid, + oid::regtype::text AS regtype, typdelim AS delimiter +FROM pg_type t +WHERE t.oid = {regtype} +ORDER BY t.oid +""" + ).format(regtype=cls._to_regtype(conn)) + + @classmethod + def _has_to_regtype_function(cls, conn: "BaseConnection[Any]") -> bool: + # to_regtype() introduced in PostgreSQL 9.4 and CockroachDB 22.2 + info = conn.info + if info.vendor == "PostgreSQL": + return info.server_version >= 90400 + elif info.vendor == "CockroachDB": + return info.server_version >= 220200 + else: + return False + + @classmethod + def _to_regtype(cls, conn: "BaseConnection[Any]") -> "SQL": + # `to_regtype()` returns the type oid or NULL, unlike the :: operator, + # which returns the type or raises an exception, which requires + # a transaction rollback and leaves traces in the server logs. 
+ + from .sql import SQL + + if cls._has_to_regtype_function(conn): + return SQL("to_regtype(%(name)s)") + else: + return SQL("%(name)s::regtype") + + def _added(self, registry: "TypesRegistry") -> None: + """Method called by the `!registry` when the object is added there.""" + pass + + +class RangeInfo(TypeInfo): + """Manage information about a range type.""" + + __module__ = "psycopg.types.range" + + def __init__( + self, + name: str, + oid: int, + array_oid: int, + *, + regtype: str = "", + subtype_oid: int, + ): + super().__init__(name, oid, array_oid, regtype=regtype) + self.subtype_oid = subtype_oid + + @classmethod + def _get_info_query(cls, conn: "BaseConnection[Any]") -> Query: + from .sql import SQL + + return SQL( + """\ +SELECT t.typname AS name, t.oid AS oid, t.typarray AS array_oid, + t.oid::regtype::text AS regtype, + r.rngsubtype AS subtype_oid +FROM pg_type t +JOIN pg_range r ON t.oid = r.rngtypid +WHERE t.oid = {regtype} +""" + ).format(regtype=cls._to_regtype(conn)) + + def _added(self, registry: "TypesRegistry") -> None: + # Map ranges subtypes to info + registry._registry[RangeInfo, self.subtype_oid] = self + + +class MultirangeInfo(TypeInfo): + """Manage information about a multirange type.""" + + __module__ = "psycopg.types.multirange" + + def __init__( + self, + name: str, + oid: int, + array_oid: int, + *, + regtype: str = "", + range_oid: int, + subtype_oid: int, + ): + super().__init__(name, oid, array_oid, regtype=regtype) + self.range_oid = range_oid + self.subtype_oid = subtype_oid + + @classmethod + def _get_info_query(cls, conn: "BaseConnection[Any]") -> Query: + from .sql import SQL + + if conn.info.server_version < 140000: + raise e.NotSupportedError( + "multirange types are only available from PostgreSQL 14" + ) + + return SQL( + """\ +SELECT t.typname AS name, t.oid AS oid, t.typarray AS array_oid, + t.oid::regtype::text AS regtype, + r.rngtypid AS range_oid, r.rngsubtype AS subtype_oid +FROM pg_type t +JOIN pg_range r ON t.oid = 
r.rngmultitypid +WHERE t.oid = {regtype} +""" + ).format(regtype=cls._to_regtype(conn)) + + def _added(self, registry: "TypesRegistry") -> None: + # Map multiranges ranges and subtypes to info + registry._registry[MultirangeInfo, self.range_oid] = self + registry._registry[MultirangeInfo, self.subtype_oid] = self + + +class CompositeInfo(TypeInfo): + """Manage information about a composite type.""" + + __module__ = "psycopg.types.composite" + + def __init__( + self, + name: str, + oid: int, + array_oid: int, + *, + regtype: str = "", + field_names: Sequence[str], + field_types: Sequence[int], + ): + super().__init__(name, oid, array_oid, regtype=regtype) + self.field_names = field_names + self.field_types = field_types + # Will be set by register() if the `factory` is a type + self.python_type: Optional[type] = None + + @classmethod + def _get_info_query(cls, conn: "BaseConnection[Any]") -> Query: + from .sql import SQL + + return SQL( + """\ +SELECT + t.typname AS name, t.oid AS oid, t.typarray AS array_oid, + t.oid::regtype::text AS regtype, + coalesce(a.fnames, '{{}}') AS field_names, + coalesce(a.ftypes, '{{}}') AS field_types +FROM pg_type t +LEFT JOIN ( + SELECT + attrelid, + array_agg(attname) AS fnames, + array_agg(atttypid) AS ftypes + FROM ( + SELECT a.attrelid, a.attname, a.atttypid + FROM pg_attribute a + JOIN pg_type t ON t.typrelid = a.attrelid + WHERE t.oid = {regtype} + AND a.attnum > 0 + AND NOT a.attisdropped + ORDER BY a.attnum + ) x + GROUP BY attrelid +) a ON a.attrelid = t.typrelid +WHERE t.oid = {regtype} +""" + ).format(regtype=cls._to_regtype(conn)) + + +class EnumInfo(TypeInfo): + """Manage information about an enum type.""" + + __module__ = "psycopg.types.enum" + + def __init__( + self, + name: str, + oid: int, + array_oid: int, + labels: Sequence[str], + ): + super().__init__(name, oid, array_oid) + self.labels = labels + # Will be set by register_enum() + self.enum: Optional[Type[Enum]] = None + + @classmethod + def _get_info_query(cls, 
conn: "BaseConnection[Any]") -> Query: + from .sql import SQL + + return SQL( + """\ +SELECT name, oid, array_oid, array_agg(label) AS labels +FROM ( + SELECT + t.typname AS name, t.oid AS oid, t.typarray AS array_oid, + e.enumlabel AS label + FROM pg_type t + LEFT JOIN pg_enum e + ON e.enumtypid = t.oid + WHERE t.oid = {regtype} + ORDER BY e.enumsortorder +) x +GROUP BY name, oid, array_oid +""" + ).format(regtype=cls._to_regtype(conn)) + + +class TypesRegistry: + """ + Container for the information about types in a database. + """ + + __module__ = "psycopg.types" + + def __init__(self, template: Optional["TypesRegistry"] = None): + self._registry: Dict[RegistryKey, TypeInfo] + + # Make a shallow copy: it will become a proper copy if the registry + # is edited. + if template: + self._registry = template._registry + self._own_state = False + template._own_state = False + else: + self.clear() + + def clear(self) -> None: + self._registry = {} + self._own_state = True + + def add(self, info: TypeInfo) -> None: + self._ensure_own_state() + if info.oid: + self._registry[info.oid] = info + if info.array_oid: + self._registry[info.array_oid] = info + self._registry[info.name] = info + + if info.regtype and info.regtype not in self._registry: + self._registry[info.regtype] = info + + # Allow info to customise further their relation with the registry + info._added(self) + + def __iter__(self) -> Iterator[TypeInfo]: + seen = set() + for t in self._registry.values(): + if id(t) not in seen: + seen.add(id(t)) + yield t + + @overload + def __getitem__(self, key: Union[str, int]) -> TypeInfo: + ... + + @overload + def __getitem__(self, key: Tuple[Type[T], int]) -> T: + ... + + def __getitem__(self, key: RegistryKey) -> TypeInfo: + """ + Return info about a type, specified by name or oid + + :param key: the name or oid of the type to look for. + + Raise KeyError if not found. 
+ """ + if isinstance(key, str): + if key.endswith("[]"): + key = key[:-2] + elif not isinstance(key, (int, tuple)): + raise TypeError(f"the key must be an oid or a name, got {type(key)}") + try: + return self._registry[key] + except KeyError: + raise KeyError(f"couldn't find the type {key!r} in the types registry") + + @overload + def get(self, key: Union[str, int]) -> Optional[TypeInfo]: + ... + + @overload + def get(self, key: Tuple[Type[T], int]) -> Optional[T]: + ... + + def get(self, key: RegistryKey) -> Optional[TypeInfo]: + """ + Return info about a type, specified by name or oid + + :param key: the name or oid of the type to look for. + + Unlike `__getitem__`, return None if not found. + """ + try: + return self[key] + except KeyError: + return None + + def get_oid(self, name: str) -> int: + """ + Return the oid of a PostgreSQL type by name. + + :param key: the name of the type to look for. + + Return the array oid if the type ends with "``[]``" + + Raise KeyError if the name is unknown. + """ + t = self[name] + if name.endswith("[]"): + return t.array_oid + else: + return t.oid + + def get_by_subtype(self, cls: Type[T], subtype: Union[int, str]) -> Optional[T]: + """ + Return info about a `TypeInfo` subclass by its element name or oid. + + :param cls: the subtype of `!TypeInfo` to look for. Currently + supported are `~psycopg.types.range.RangeInfo` and + `~psycopg.types.multirange.MultirangeInfo`. + :param subtype: The name or OID of the subtype of the element to look for. + :return: The `!TypeInfo` object of class `!cls` whose subtype is + `!subtype`. `!None` if the element or its range are not found. + """ + try: + info = self[subtype] + except KeyError: + return None + return self.get((cls, info.oid)) + + def _ensure_own_state(self) -> None: + # Time to write! so, copy. 
+ if not self._own_state: + self._registry = self._registry.copy() + self._own_state = True diff --git a/lib/python3.11/site-packages/psycopg/_tz.py b/lib/python3.11/site-packages/psycopg/_tz.py new file mode 100644 index 0000000..813ed62 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/_tz.py @@ -0,0 +1,44 @@ +""" +Timezone utility functions. +""" + +# Copyright (C) 2020 The Psycopg Team + +import logging +from typing import Dict, Optional, Union +from datetime import timezone, tzinfo + +from .pq.abc import PGconn +from ._compat import ZoneInfo + +logger = logging.getLogger("psycopg") + +_timezones: Dict[Union[None, bytes], tzinfo] = { + None: timezone.utc, + b"UTC": timezone.utc, +} + + +def get_tzinfo(pgconn: Optional[PGconn]) -> tzinfo: + """Return the Python timezone info of the connection's timezone.""" + tzname = pgconn.parameter_status(b"TimeZone") if pgconn else None + try: + return _timezones[tzname] + except KeyError: + sname = tzname.decode() if tzname else "UTC" + try: + zi: tzinfo = ZoneInfo(sname) + except (KeyError, OSError): + logger.warning("unknown PostgreSQL timezone: %r; will use UTC", sname) + zi = timezone.utc + except Exception as ex: + logger.warning( + "error handling PostgreSQL timezone: %r; will use UTC (%s - %s)", + sname, + type(ex).__name__, + ex, + ) + zi = timezone.utc + + _timezones[tzname] = zi + return zi diff --git a/lib/python3.11/site-packages/psycopg/_wrappers.py b/lib/python3.11/site-packages/psycopg/_wrappers.py new file mode 100644 index 0000000..f861741 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/_wrappers.py @@ -0,0 +1,137 @@ +""" +Wrappers for numeric types. +""" + +# Copyright (C) 2020 The Psycopg Team + +# Wrappers to force numbers to be cast as specific PostgreSQL types + +# These types are implemented here but exposed by `psycopg.types.numeric`. +# They are defined here to avoid a circular import. 
+_MODULE = "psycopg.types.numeric" + + +class Int2(int): + """ + Force dumping a Python `!int` as a PostgreSQL :sql:`smallint/int2`. + """ + + __module__ = _MODULE + __slots__ = () + + def __new__(cls, arg: int) -> "Int2": + return super().__new__(cls, arg) + + def __str__(self) -> str: + return super().__repr__() + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({super().__repr__()})" + + +class Int4(int): + """ + Force dumping a Python `!int` as a PostgreSQL :sql:`integer/int4`. + """ + + __module__ = _MODULE + __slots__ = () + + def __new__(cls, arg: int) -> "Int4": + return super().__new__(cls, arg) + + def __str__(self) -> str: + return super().__repr__() + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({super().__repr__()})" + + +class Int8(int): + """ + Force dumping a Python `!int` as a PostgreSQL :sql:`bigint/int8`. + """ + + __module__ = _MODULE + __slots__ = () + + def __new__(cls, arg: int) -> "Int8": + return super().__new__(cls, arg) + + def __str__(self) -> str: + return super().__repr__() + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({super().__repr__()})" + + +class IntNumeric(int): + """ + Force dumping a Python `!int` as a PostgreSQL :sql:`numeric/decimal`. + """ + + __module__ = _MODULE + __slots__ = () + + def __new__(cls, arg: int) -> "IntNumeric": + return super().__new__(cls, arg) + + def __str__(self) -> str: + return super().__repr__() + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({super().__repr__()})" + + +class Float4(float): + """ + Force dumping a Python `!float` as a PostgreSQL :sql:`float4/real`. 
+ """ + + __module__ = _MODULE + __slots__ = () + + def __new__(cls, arg: float) -> "Float4": + return super().__new__(cls, arg) + + def __str__(self) -> str: + return super().__repr__() + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({super().__repr__()})" + + +class Float8(float): + """ + Force dumping a Python `!float` as a PostgreSQL :sql:`float8/double precision`. + """ + + __module__ = _MODULE + __slots__ = () + + def __new__(cls, arg: float) -> "Float8": + return super().__new__(cls, arg) + + def __str__(self) -> str: + return super().__repr__() + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({super().__repr__()})" + + +class Oid(int): + """ + Force dumping a Python `!int` as a PostgreSQL :sql:`oid`. + """ + + __module__ = _MODULE + __slots__ = () + + def __new__(cls, arg: int) -> "Oid": + return super().__new__(cls, arg) + + def __str__(self) -> str: + return super().__repr__() + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({super().__repr__()})" diff --git a/lib/python3.11/site-packages/psycopg/abc.py b/lib/python3.11/site-packages/psycopg/abc.py new file mode 100644 index 0000000..0cf1a75 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/abc.py @@ -0,0 +1,265 @@ +""" +Protocol objects representing different implementations of the same classes. +""" + +# Copyright (C) 2020 The Psycopg Team + +from typing import Any, Callable, Generator, Mapping +from typing import List, Optional, Sequence, Tuple, TypeVar, Union +from typing import TYPE_CHECKING +from typing_extensions import TypeAlias + +from . import pq +from ._enums import PyFormat as PyFormat +from ._compat import Protocol, LiteralString + +if TYPE_CHECKING: + from . 
import sql + from .rows import Row, RowMaker + from .pq.abc import PGresult + from .waiting import Wait, Ready + from .connection import BaseConnection + from ._adapters_map import AdaptersMap + +NoneType: type = type(None) + +# An object implementing the buffer protocol +Buffer: TypeAlias = Union[bytes, bytearray, memoryview] + +Query: TypeAlias = Union[LiteralString, bytes, "sql.SQL", "sql.Composed"] +Params: TypeAlias = Union[Sequence[Any], Mapping[str, Any]] +ConnectionType = TypeVar("ConnectionType", bound="BaseConnection[Any]") +PipelineCommand: TypeAlias = Callable[[], None] +DumperKey: TypeAlias = Union[type, Tuple["DumperKey", ...]] + +# Waiting protocol types + +RV = TypeVar("RV") + +PQGenConn: TypeAlias = Generator[Tuple[int, "Wait"], "Ready", RV] +"""Generator for processes where the connection file number can change. + +This can happen in connection and reset, but not in normal querying. +""" + +PQGen: TypeAlias = Generator["Wait", "Ready", RV] +"""Generator for processes where the connection file number won't change. +""" + + +class WaitFunc(Protocol): + """ + Wait on the connection which generated `PQgen` and return its final result. + """ + + def __call__( + self, gen: PQGen[RV], fileno: int, timeout: Optional[float] = None + ) -> RV: + ... + + +# Adaptation types + +DumpFunc: TypeAlias = Callable[[Any], Buffer] +LoadFunc: TypeAlias = Callable[[Buffer], Any] + + +class AdaptContext(Protocol): + """ + A context describing how types are adapted. + + Example of `~AdaptContext` are `~psycopg.Connection`, `~psycopg.Cursor`, + `~psycopg.adapt.Transformer`, `~psycopg.adapt.AdaptersMap`. + + Note that this is a `~typing.Protocol`, so objects implementing + `!AdaptContext` don't need to explicitly inherit from this class. + + """ + + @property + def adapters(self) -> "AdaptersMap": + """The adapters configuration that this object uses.""" + ... 
+ + @property + def connection(self) -> Optional["BaseConnection[Any]"]: + """The connection used by this object, if available. + + :rtype: `~psycopg.Connection` or `~psycopg.AsyncConnection` or `!None` + """ + ... + + +class Dumper(Protocol): + """ + Convert Python objects of type `!cls` to PostgreSQL representation. + """ + + format: pq.Format + """ + The format that this class `dump()` method produces, + `~psycopg.pq.Format.TEXT` or `~psycopg.pq.Format.BINARY`. + + This is a class attribute. + """ + + oid: int + """The oid to pass to the server, if known; 0 otherwise (class attribute).""" + + def __init__(self, cls: type, context: Optional[AdaptContext] = None): + ... + + def dump(self, obj: Any) -> Buffer: + """Convert the object `!obj` to PostgreSQL representation. + + :param obj: the object to convert. + """ + ... + + def quote(self, obj: Any) -> Buffer: + """Convert the object `!obj` to escaped representation. + + :param obj: the object to convert. + """ + ... + + def get_key(self, obj: Any, format: PyFormat) -> DumperKey: + """Return an alternative key to upgrade the dumper to represent `!obj`. + + :param obj: The object to convert + :param format: The format to convert to + + Normally the type of the object is all it takes to define how to dump + the object to the database. For instance, a Python `~datetime.date` can + be simply converted into a PostgreSQL :sql:`date`. + + In a few cases, just the type is not enough. For example: + + - A Python `~datetime.datetime` could be represented as a + :sql:`timestamptz` or a :sql:`timestamp`, according to whether it + specifies a `!tzinfo` or not. + + - A Python int could be stored as several Postgres types: int2, int4, + int8, numeric. If a type too small is used, it may result in an + overflow. If a type too large is used, PostgreSQL may not want to + cast it to a smaller type. + + - Python lists should be dumped according to the type they contain to + convert them to e.g. 
array of strings, array of ints (and which + size of int?...) + + In these cases, a dumper can implement `!get_key()` and return a new + class, or sequence of classes, that can be used to identify the same + dumper again. If the mechanism is not needed, the method should return + the same `!cls` object passed in the constructor. + + If a dumper implements `get_key()` it should also implement + `upgrade()`. + + """ + ... + + def upgrade(self, obj: Any, format: PyFormat) -> "Dumper": + """Return a new dumper to manage `!obj`. + + :param obj: The object to convert + :param format: The format to convert to + + Once `Transformer.get_dumper()` has been notified by `get_key()` that + this Dumper class cannot handle `!obj` itself, it will invoke + `!upgrade()`, which should return a new `Dumper` instance, which will + be reused for every objects for which `!get_key()` returns the same + result. + """ + ... + + +class Loader(Protocol): + """ + Convert PostgreSQL values with type OID `!oid` to Python objects. + """ + + format: pq.Format + """ + The format that this class `load()` method can convert, + `~psycopg.pq.Format.TEXT` or `~psycopg.pq.Format.BINARY`. + + This is a class attribute. + """ + + def __init__(self, oid: int, context: Optional[AdaptContext] = None): + ... + + def load(self, data: Buffer) -> Any: + """ + Convert the data returned by the database into a Python object. + + :param data: the data to convert. + """ + ... + + +class Transformer(Protocol): + types: Optional[Tuple[int, ...]] + formats: Optional[List[pq.Format]] + + def __init__(self, context: Optional[AdaptContext] = None): + ... + + @classmethod + def from_context(cls, context: Optional[AdaptContext]) -> "Transformer": + ... + + @property + def connection(self) -> Optional["BaseConnection[Any]"]: + ... + + @property + def encoding(self) -> str: + ... + + @property + def adapters(self) -> "AdaptersMap": + ... + + @property + def pgresult(self) -> Optional["PGresult"]: + ... 
+ + def set_pgresult( + self, + result: Optional["PGresult"], + *, + set_loaders: bool = True, + format: Optional[pq.Format] = None + ) -> None: + ... + + def set_dumper_types(self, types: Sequence[int], format: pq.Format) -> None: + ... + + def set_loader_types(self, types: Sequence[int], format: pq.Format) -> None: + ... + + def dump_sequence( + self, params: Sequence[Any], formats: Sequence[PyFormat] + ) -> Sequence[Optional[Buffer]]: + ... + + def as_literal(self, obj: Any) -> bytes: + ... + + def get_dumper(self, obj: Any, format: PyFormat) -> Dumper: + ... + + def load_rows(self, row0: int, row1: int, make_row: "RowMaker[Row]") -> List["Row"]: + ... + + def load_row(self, row: int, make_row: "RowMaker[Row]") -> Optional["Row"]: + ... + + def load_sequence(self, record: Sequence[Optional[Buffer]]) -> Tuple[Any, ...]: + ... + + def get_loader(self, oid: int, format: pq.Format) -> Loader: + ... diff --git a/lib/python3.11/site-packages/psycopg/adapt.py b/lib/python3.11/site-packages/psycopg/adapt.py new file mode 100644 index 0000000..7ec4a55 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/adapt.py @@ -0,0 +1,162 @@ +""" +Entry point into the adaptation system. +""" + +# Copyright (C) 2020 The Psycopg Team + +from abc import ABC, abstractmethod +from typing import Any, Optional, Type, TYPE_CHECKING + +from . import pq, abc +from . import _adapters_map +from ._enums import PyFormat as PyFormat +from ._cmodule import _psycopg + +if TYPE_CHECKING: + from .connection import BaseConnection + +AdaptersMap = _adapters_map.AdaptersMap +Buffer = abc.Buffer + +ORD_BS = ord("\\") + + +class Dumper(abc.Dumper, ABC): + """ + Convert Python object of the type `!cls` to PostgreSQL representation. 
+ """ + + oid: int = 0 + """The oid to pass to the server, if known.""" + + format: pq.Format = pq.Format.TEXT + """The format of the data dumped.""" + + def __init__(self, cls: type, context: Optional[abc.AdaptContext] = None): + self.cls = cls + self.connection: Optional["BaseConnection[Any]"] = ( + context.connection if context else None + ) + + def __repr__(self) -> str: + return ( + f"<{type(self).__module__}.{type(self).__qualname__}" + f" (oid={self.oid}) at 0x{id(self):x}>" + ) + + @abstractmethod + def dump(self, obj: Any) -> Buffer: + ... + + def quote(self, obj: Any) -> Buffer: + """ + By default return the `dump()` value quoted and sanitised, so + that the result can be used to build a SQL string. This works well + for most types and you won't likely have to implement this method in a + subclass. + """ + value = self.dump(obj) + + if self.connection: + esc = pq.Escaping(self.connection.pgconn) + # escaping and quoting + return esc.escape_literal(value) + + # This path is taken when quote is asked without a connection, + # usually it means by psycopg.sql.quote() or by + # 'Composible.as_string(None)'. Most often than not this is done by + # someone generating a SQL file to consume elsewhere. + + # No quoting, only quote escaping, random bs escaping. See further. + esc = pq.Escaping() + out = esc.escape_string(value) + + # b"\\" in memoryview doesn't work so search for the ascii value + if ORD_BS not in out: + # If the string has no backslash, the result is correct and we + # don't need to bother with standard_conforming_strings. + return b"'" + out + b"'" + + # The libpq has a crazy behaviour: PQescapeString uses the last + # standard_conforming_strings setting seen on a connection. This + # means that backslashes might be escaped or might not. + # + # A syntax E'\\' works everywhere, whereas E'\' is an error. OTOH, + # if scs is off, '\\' raises a warning and '\' is an error. 
+ # + # Check what the libpq does, and if it doesn't escape the backslash + # let's do it on our own. Never mind the race condition. + rv: bytes = b" E'" + out + b"'" + if esc.escape_string(b"\\") == b"\\": + rv = rv.replace(b"\\", b"\\\\") + return rv + + def get_key(self, obj: Any, format: PyFormat) -> abc.DumperKey: + """ + Implementation of the `~psycopg.abc.Dumper.get_key()` member of the + `~psycopg.abc.Dumper` protocol. Look at its definition for details. + + This implementation returns the `!cls` passed in the constructor. + Subclasses needing to specialise the PostgreSQL type according to the + *value* of the object dumped (not only according to to its type) + should override this class. + + """ + return self.cls + + def upgrade(self, obj: Any, format: PyFormat) -> "Dumper": + """ + Implementation of the `~psycopg.abc.Dumper.upgrade()` member of the + `~psycopg.abc.Dumper` protocol. Look at its definition for details. + + This implementation just returns `!self`. If a subclass implements + `get_key()` it should probably override `!upgrade()` too. + """ + return self + + +class Loader(abc.Loader, ABC): + """ + Convert PostgreSQL values with type OID `!oid` to Python objects. + """ + + format: pq.Format = pq.Format.TEXT + """The format of the data loaded.""" + + def __init__(self, oid: int, context: Optional[abc.AdaptContext] = None): + self.oid = oid + self.connection: Optional["BaseConnection[Any]"] = ( + context.connection if context else None + ) + + @abstractmethod + def load(self, data: Buffer) -> Any: + """Convert a PostgreSQL value to a Python object.""" + ... + + +Transformer: Type["abc.Transformer"] + +# Override it with fast object if available +if _psycopg: + Transformer = _psycopg.Transformer +else: + from . 
import _transform + + Transformer = _transform.Transformer + + +class RecursiveDumper(Dumper): + """Dumper with a transformer to help dumping recursive types.""" + + def __init__(self, cls: type, context: Optional[abc.AdaptContext] = None): + super().__init__(cls, context) + self._tx = Transformer.from_context(context) + + +class RecursiveLoader(Loader): + """Loader with a transformer to help loading recursive types.""" + + def __init__(self, oid: int, context: Optional[abc.AdaptContext] = None): + super().__init__(oid, context) + self._tx = Transformer.from_context(context) diff --git a/lib/python3.11/site-packages/psycopg/client_cursor.py b/lib/python3.11/site-packages/psycopg/client_cursor.py new file mode 100644 index 0000000..6271ec5 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/client_cursor.py @@ -0,0 +1,95 @@ +""" +psycopg client-side binding cursors +""" + +# Copyright (C) 2022 The Psycopg Team + +from typing import Optional, Tuple, TYPE_CHECKING +from functools import partial + +from ._queries import PostgresQuery, PostgresClientQuery + +from . import pq +from . import adapt +from . import errors as e +from .abc import ConnectionType, Query, Params +from .rows import Row +from .cursor import BaseCursor, Cursor +from ._preparing import Prepare +from .cursor_async import AsyncCursor + +if TYPE_CHECKING: + from typing import Any # noqa: F401 + from .connection import Connection # noqa: F401 + from .connection_async import AsyncConnection # noqa: F401 + +TEXT = pq.Format.TEXT +BINARY = pq.Format.BINARY + + +class ClientCursorMixin(BaseCursor[ConnectionType, Row]): + def mogrify(self, query: Query, params: Optional[Params] = None) -> str: + """ + Return the query and parameters merged. + + Parameters are adapted and merged to the query the same way that + `!execute()` would do. 
+ + """ + self._tx = adapt.Transformer(self) + pgq = self._convert_query(query, params) + return pgq.query.decode(self._tx.encoding) + + def _execute_send( + self, + query: PostgresQuery, + *, + force_extended: bool = False, + binary: Optional[bool] = None, + ) -> None: + if binary is None: + fmt = self.format + else: + fmt = BINARY if binary else TEXT + + if fmt == BINARY: + raise e.NotSupportedError( + "client-side cursors don't support binary results" + ) + + self._query = query + + if self._conn._pipeline: + # In pipeline mode always use PQsendQueryParams - see #314 + # Multiple statements in the same query are not allowed anyway. + self._conn._pipeline.command_queue.append( + partial(self._pgconn.send_query_params, query.query, None) + ) + elif force_extended: + self._pgconn.send_query_params(query.query, None) + else: + # If we can, let's use simple query protocol, + # as it can execute more than one statement in a single query. + self._pgconn.send_query(query.query) + + def _convert_query( + self, query: Query, params: Optional[Params] = None + ) -> PostgresQuery: + pgq = PostgresClientQuery(self._tx) + pgq.convert(query, params) + return pgq + + def _get_prepared( + self, pgq: PostgresQuery, prepare: Optional[bool] = None + ) -> Tuple[Prepare, bytes]: + return (Prepare.NO, b"") + + +class ClientCursor(ClientCursorMixin["Connection[Any]", Row], Cursor[Row]): + __module__ = "psycopg" + + +class AsyncClientCursor( + ClientCursorMixin["AsyncConnection[Any]", Row], AsyncCursor[Row] +): + __module__ = "psycopg" diff --git a/lib/python3.11/site-packages/psycopg/connection.py b/lib/python3.11/site-packages/psycopg/connection.py new file mode 100644 index 0000000..299c4f3 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/connection.py @@ -0,0 +1,1031 @@ +""" +psycopg connection objects +""" + +# Copyright (C) 2020 The Psycopg Team + +import logging +import threading +from types import TracebackType +from typing import Any, Callable, cast, Dict, Generator, 
Generic, Iterator +from typing import List, NamedTuple, Optional, Type, TypeVar, Tuple, Union +from typing import overload, TYPE_CHECKING +from weakref import ref, ReferenceType +from warnings import warn +from functools import partial +from contextlib import contextmanager +from typing_extensions import TypeAlias + +from . import pq +from . import errors as e +from . import waiting +from . import postgres +from .abc import AdaptContext, ConnectionType, Params, Query, RV +from .abc import PQGen, PQGenConn +from .sql import Composable, SQL +from ._tpc import Xid +from .rows import Row, RowFactory, tuple_row, TupleRow, args_row +from .adapt import AdaptersMap +from ._enums import IsolationLevel +from .cursor import Cursor +from ._compat import LiteralString +from .conninfo import make_conninfo, conninfo_to_dict, ConnectionInfo +from ._pipeline import BasePipeline, Pipeline +from .generators import notifies, connect, execute +from ._encodings import pgconn_encoding +from ._preparing import PrepareManager +from .transaction import Transaction +from .server_cursor import ServerCursor + +if TYPE_CHECKING: + from .pq.abc import PGconn, PGresult + from psycopg_pool.base import BasePool + + +# Row Type variable for Cursor (when it needs to be distinguished from the +# connection's one) +CursorRow = TypeVar("CursorRow") + +TEXT = pq.Format.TEXT +BINARY = pq.Format.BINARY + +OK = pq.ConnStatus.OK +BAD = pq.ConnStatus.BAD + +COMMAND_OK = pq.ExecStatus.COMMAND_OK +TUPLES_OK = pq.ExecStatus.TUPLES_OK +FATAL_ERROR = pq.ExecStatus.FATAL_ERROR + +IDLE = pq.TransactionStatus.IDLE +INTRANS = pq.TransactionStatus.INTRANS + +logger = logging.getLogger("psycopg") + + +class Notify(NamedTuple): + """An asynchronous notification received from the database.""" + + channel: str + """The name of the channel on which the notification was received.""" + + payload: str + """The message attached to the notification.""" + + pid: int + """The PID of the backend process which sent the 
notification.""" + + +Notify.__module__ = "psycopg" + +NoticeHandler: TypeAlias = Callable[[e.Diagnostic], None] +NotifyHandler: TypeAlias = Callable[[Notify], None] + + +class BaseConnection(Generic[Row]): + """ + Base class for different types of connections. + + Share common functionalities such as access to the wrapped PGconn, but + allow different interfaces (sync/async). + """ + + # DBAPI2 exposed exceptions + Warning = e.Warning + Error = e.Error + InterfaceError = e.InterfaceError + DatabaseError = e.DatabaseError + DataError = e.DataError + OperationalError = e.OperationalError + IntegrityError = e.IntegrityError + InternalError = e.InternalError + ProgrammingError = e.ProgrammingError + NotSupportedError = e.NotSupportedError + + # Enums useful for the connection + ConnStatus = pq.ConnStatus + TransactionStatus = pq.TransactionStatus + + def __init__(self, pgconn: "PGconn"): + self.pgconn = pgconn + self._autocommit = False + + # None, but set to a copy of the global adapters map as soon as requested. 
+ self._adapters: Optional[AdaptersMap] = None + + self._notice_handlers: List[NoticeHandler] = [] + self._notify_handlers: List[NotifyHandler] = [] + + # Number of transaction blocks currently entered + self._num_transactions = 0 + + self._closed = False # closed by an explicit close() + self._prepared: PrepareManager = PrepareManager() + self._tpc: Optional[Tuple[Xid, bool]] = None # xid, prepared + + wself = ref(self) + pgconn.notice_handler = partial(BaseConnection._notice_handler, wself) + pgconn.notify_handler = partial(BaseConnection._notify_handler, wself) + + # Attribute is only set if the connection is from a pool so we can tell + # apart a connection in the pool too (when _pool = None) + self._pool: Optional["BasePool[Any]"] + + self._pipeline: Optional[BasePipeline] = None + + # Time after which the connection should be closed + self._expire_at: float + + self._isolation_level: Optional[IsolationLevel] = None + self._read_only: Optional[bool] = None + self._deferrable: Optional[bool] = None + self._begin_statement = b"" + + def __del__(self) -> None: + # If fails on connection we might not have this attribute yet + if not hasattr(self, "pgconn"): + return + + # Connection correctly closed + if self.closed: + return + + # Connection in a pool so terminating with the program is normal + if hasattr(self, "_pool"): + return + + warn( + f"connection {self} was deleted while still open." + " Please use 'with' or '.close()' to close the connection", + ResourceWarning, + ) + + def __repr__(self) -> str: + cls = f"{self.__class__.__module__}.{self.__class__.__qualname__}" + info = pq.misc.connection_summary(self.pgconn) + return f"<{cls} {info} at 0x{id(self):x}>" + + @property + def closed(self) -> bool: + """`!True` if the connection is closed.""" + return self.pgconn.status == BAD + + @property + def broken(self) -> bool: + """ + `!True` if the connection was interrupted. 
+ + A broken connection is always `closed`, but wasn't closed in a clean + way, such as using `close()` or a `!with` block. + """ + return self.pgconn.status == BAD and not self._closed + + @property + def autocommit(self) -> bool: + """The autocommit state of the connection.""" + return self._autocommit + + @autocommit.setter + def autocommit(self, value: bool) -> None: + self._set_autocommit(value) + + def _set_autocommit(self, value: bool) -> None: + raise NotImplementedError + + def _set_autocommit_gen(self, value: bool) -> PQGen[None]: + yield from self._check_intrans_gen("autocommit") + self._autocommit = bool(value) + + @property + def isolation_level(self) -> Optional[IsolationLevel]: + """ + The isolation level of the new transactions started on the connection. + """ + return self._isolation_level + + @isolation_level.setter + def isolation_level(self, value: Optional[IsolationLevel]) -> None: + self._set_isolation_level(value) + + def _set_isolation_level(self, value: Optional[IsolationLevel]) -> None: + raise NotImplementedError + + def _set_isolation_level_gen(self, value: Optional[IsolationLevel]) -> PQGen[None]: + yield from self._check_intrans_gen("isolation_level") + self._isolation_level = IsolationLevel(value) if value is not None else None + self._begin_statement = b"" + + @property + def read_only(self) -> Optional[bool]: + """ + The read-only state of the new transactions started on the connection. + """ + return self._read_only + + @read_only.setter + def read_only(self, value: Optional[bool]) -> None: + self._set_read_only(value) + + def _set_read_only(self, value: Optional[bool]) -> None: + raise NotImplementedError + + def _set_read_only_gen(self, value: Optional[bool]) -> PQGen[None]: + yield from self._check_intrans_gen("read_only") + self._read_only = bool(value) + self._begin_statement = b"" + + @property + def deferrable(self) -> Optional[bool]: + """ + The deferrable state of the new transactions started on the connection. 
+ """ + return self._deferrable + + @deferrable.setter + def deferrable(self, value: Optional[bool]) -> None: + self._set_deferrable(value) + + def _set_deferrable(self, value: Optional[bool]) -> None: + raise NotImplementedError + + def _set_deferrable_gen(self, value: Optional[bool]) -> PQGen[None]: + yield from self._check_intrans_gen("deferrable") + self._deferrable = bool(value) + self._begin_statement = b"" + + def _check_intrans_gen(self, attribute: str) -> PQGen[None]: + # Raise an exception if we are in a transaction + status = self.pgconn.transaction_status + if status == IDLE and self._pipeline: + yield from self._pipeline._sync_gen() + status = self.pgconn.transaction_status + if status != IDLE: + if self._num_transactions: + raise e.ProgrammingError( + f"can't change {attribute!r} now: " + "connection.transaction() context in progress" + ) + else: + raise e.ProgrammingError( + f"can't change {attribute!r} now: " + "connection in transaction status " + f"{pq.TransactionStatus(status).name}" + ) + + @property + def info(self) -> ConnectionInfo: + """A `ConnectionInfo` attribute to inspect connection properties.""" + return ConnectionInfo(self.pgconn) + + @property + def adapters(self) -> AdaptersMap: + if not self._adapters: + self._adapters = AdaptersMap(postgres.adapters) + + return self._adapters + + @property + def connection(self) -> "BaseConnection[Row]": + # implement the AdaptContext protocol + return self + + def fileno(self) -> int: + """Return the file descriptor of the connection. + + This function allows to use the connection as file-like object in + functions waiting for readiness, such as the ones defined in the + `selectors` module. + """ + return self.pgconn.socket + + def cancel(self) -> None: + """Cancel the current operation on the connection.""" + # No-op if the connection is closed + # this allows to use the method as callback handler without caring + # about its life. 
+ if self.closed: + return + + if self._tpc and self._tpc[1]: + raise e.ProgrammingError( + "cancel() cannot be used with a prepared two-phase transaction" + ) + + c = self.pgconn.get_cancel() + c.cancel() + + def add_notice_handler(self, callback: NoticeHandler) -> None: + """ + Register a callable to be invoked when a notice message is received. + + :param callback: the callback to call upon message received. + :type callback: Callable[[~psycopg.errors.Diagnostic], None] + """ + self._notice_handlers.append(callback) + + def remove_notice_handler(self, callback: NoticeHandler) -> None: + """ + Unregister a notice message callable previously registered. + + :param callback: the callback to remove. + :type callback: Callable[[~psycopg.errors.Diagnostic], None] + """ + self._notice_handlers.remove(callback) + + @staticmethod + def _notice_handler( + wself: "ReferenceType[BaseConnection[Row]]", res: "PGresult" + ) -> None: + self = wself() + if not (self and self._notice_handlers): + return + + diag = e.Diagnostic(res, pgconn_encoding(self.pgconn)) + for cb in self._notice_handlers: + try: + cb(diag) + except Exception as ex: + logger.exception("error processing notice callback '%s': %s", cb, ex) + + def add_notify_handler(self, callback: NotifyHandler) -> None: + """ + Register a callable to be invoked whenever a notification is received. + + :param callback: the callback to call upon notification received. + :type callback: Callable[[~psycopg.Notify], None] + """ + self._notify_handlers.append(callback) + + def remove_notify_handler(self, callback: NotifyHandler) -> None: + """ + Unregister a notification callable previously registered. + + :param callback: the callback to remove. 
+ :type callback: Callable[[~psycopg.Notify], None] + """ + self._notify_handlers.remove(callback) + + @staticmethod + def _notify_handler( + wself: "ReferenceType[BaseConnection[Row]]", pgn: pq.PGnotify + ) -> None: + self = wself() + if not (self and self._notify_handlers): + return + + enc = pgconn_encoding(self.pgconn) + n = Notify(pgn.relname.decode(enc), pgn.extra.decode(enc), pgn.be_pid) + for cb in self._notify_handlers: + cb(n) + + @property + def prepare_threshold(self) -> Optional[int]: + """ + Number of times a query is executed before it is prepared. + + - If it is set to 0, every query is prepared the first time it is + executed. + - If it is set to `!None`, prepared statements are disabled on the + connection. + + Default value: 5 + """ + return self._prepared.prepare_threshold + + @prepare_threshold.setter + def prepare_threshold(self, value: Optional[int]) -> None: + self._prepared.prepare_threshold = value + + @property + def prepared_max(self) -> int: + """ + Maximum number of prepared statements on the connection. + + Default value: 100 + """ + return self._prepared.prepared_max + + @prepared_max.setter + def prepared_max(self, value: int) -> None: + self._prepared.prepared_max = value + + # Generators to perform high-level operations on the connection + # + # These operations are expressed in terms of non-blocking generators + # and the task of waiting when needed (when the generators yield) is left + # to the connections subclass, which might wait either in blocking mode + # or through asyncio. + # + # All these generators assume exclusive access to the connection: subclasses + # should have a lock and hold it before calling and consuming them. 
+ + @classmethod + def _connect_gen( + cls: Type[ConnectionType], + conninfo: str = "", + *, + autocommit: bool = False, + ) -> PQGenConn[ConnectionType]: + """Generator to connect to the database and create a new instance.""" + pgconn = yield from connect(conninfo) + conn = cls(pgconn) + conn._autocommit = bool(autocommit) + return conn + + def _exec_command( + self, command: Query, result_format: pq.Format = TEXT + ) -> PQGen[Optional["PGresult"]]: + """ + Generator to send a command and receive the result to the backend. + + Only used to implement internal commands such as "commit", with eventual + arguments bound client-side. The cursor can do more complex stuff. + """ + self._check_connection_ok() + + if isinstance(command, str): + command = command.encode(pgconn_encoding(self.pgconn)) + elif isinstance(command, Composable): + command = command.as_bytes(self) + + if self._pipeline: + cmd = partial( + self.pgconn.send_query_params, + command, + None, + result_format=result_format, + ) + self._pipeline.command_queue.append(cmd) + self._pipeline.result_queue.append(None) + return None + + self.pgconn.send_query_params(command, None, result_format=result_format) + + result = (yield from execute(self.pgconn))[-1] + if result.status != COMMAND_OK and result.status != TUPLES_OK: + if result.status == FATAL_ERROR: + raise e.error_from_result(result, encoding=pgconn_encoding(self.pgconn)) + else: + raise e.InterfaceError( + f"unexpected result {pq.ExecStatus(result.status).name}" + f" from command {command.decode()!r}" + ) + return result + + def _check_connection_ok(self) -> None: + if self.pgconn.status == OK: + return + + if self.pgconn.status == BAD: + raise e.OperationalError("the connection is closed") + raise e.InterfaceError( + "cannot execute operations: the connection is" + f" in status {self.pgconn.status}" + ) + + def _start_query(self) -> PQGen[None]: + """Generator to start a transaction if necessary.""" + if self._autocommit: + return + + if 
self.pgconn.transaction_status != IDLE: + return + + yield from self._exec_command(self._get_tx_start_command()) + if self._pipeline: + yield from self._pipeline._sync_gen() + + def _get_tx_start_command(self) -> bytes: + if self._begin_statement: + return self._begin_statement + + parts = [b"BEGIN"] + + if self.isolation_level is not None: + val = IsolationLevel(self.isolation_level) + parts.append(b"ISOLATION LEVEL") + parts.append(val.name.replace("_", " ").encode()) + + if self.read_only is not None: + parts.append(b"READ ONLY" if self.read_only else b"READ WRITE") + + if self.deferrable is not None: + parts.append(b"DEFERRABLE" if self.deferrable else b"NOT DEFERRABLE") + + self._begin_statement = b" ".join(parts) + return self._begin_statement + + def _commit_gen(self) -> PQGen[None]: + """Generator implementing `Connection.commit()`.""" + if self._num_transactions: + raise e.ProgrammingError( + "Explicit commit() forbidden within a Transaction " + "context. (Transaction will be automatically committed " + "on successful exit from context.)" + ) + if self._tpc: + raise e.ProgrammingError( + "commit() cannot be used during a two-phase transaction" + ) + if self.pgconn.transaction_status == IDLE: + return + + yield from self._exec_command(b"COMMIT") + + if self._pipeline: + yield from self._pipeline._sync_gen() + + def _rollback_gen(self) -> PQGen[None]: + """Generator implementing `Connection.rollback()`.""" + if self._num_transactions: + raise e.ProgrammingError( + "Explicit rollback() forbidden within a Transaction " + "context. 
(Either raise Rollback() or allow " + "an exception to propagate out of the context.)" + ) + if self._tpc: + raise e.ProgrammingError( + "rollback() cannot be used during a two-phase transaction" + ) + + # Get out of a "pipeline aborted" state + if self._pipeline: + yield from self._pipeline._sync_gen() + + if self.pgconn.transaction_status == IDLE: + return + + yield from self._exec_command(b"ROLLBACK") + self._prepared.clear() + for cmd in self._prepared.get_maintenance_commands(): + yield from self._exec_command(cmd) + + if self._pipeline: + yield from self._pipeline._sync_gen() + + def xid(self, format_id: int, gtrid: str, bqual: str) -> Xid: + """ + Returns a `Xid` to pass to the `!tpc_*()` methods of this connection. + + The argument types and constraints are explained in + :ref:`two-phase-commit`. + + The values passed to the method will be available on the returned + object as the members `~Xid.format_id`, `~Xid.gtrid`, `~Xid.bqual`. + """ + self._check_tpc() + return Xid.from_parts(format_id, gtrid, bqual) + + def _tpc_begin_gen(self, xid: Union[Xid, str]) -> PQGen[None]: + self._check_tpc() + + if not isinstance(xid, Xid): + xid = Xid.from_string(xid) + + if self.pgconn.transaction_status != IDLE: + raise e.ProgrammingError( + "can't start two-phase transaction: connection in status" + f" {pq.TransactionStatus(self.pgconn.transaction_status).name}" + ) + + if self._autocommit: + raise e.ProgrammingError( + "can't use two-phase transactions in autocommit mode" + ) + + self._tpc = (xid, False) + yield from self._exec_command(self._get_tx_start_command()) + + def _tpc_prepare_gen(self) -> PQGen[None]: + if not self._tpc: + raise e.ProgrammingError( + "'tpc_prepare()' must be called inside a two-phase transaction" + ) + if self._tpc[1]: + raise e.ProgrammingError( + "'tpc_prepare()' cannot be used during a prepared two-phase transaction" + ) + xid = self._tpc[0] + self._tpc = (xid, True) + yield from self._exec_command(SQL("PREPARE TRANSACTION 
{}").format(str(xid))) + if self._pipeline: + yield from self._pipeline._sync_gen() + + def _tpc_finish_gen( + self, action: LiteralString, xid: Union[Xid, str, None] + ) -> PQGen[None]: + fname = f"tpc_{action.lower()}()" + if xid is None: + if not self._tpc: + raise e.ProgrammingError( + f"{fname} without xid must must be" + " called inside a two-phase transaction" + ) + xid = self._tpc[0] + else: + if self._tpc: + raise e.ProgrammingError( + f"{fname} with xid must must be called" + " outside a two-phase transaction" + ) + if not isinstance(xid, Xid): + xid = Xid.from_string(xid) + + if self._tpc and not self._tpc[1]: + meth: Callable[[], PQGen[None]] + meth = getattr(self, f"_{action.lower()}_gen") + self._tpc = None + yield from meth() + else: + yield from self._exec_command( + SQL("{} PREPARED {}").format(SQL(action), str(xid)) + ) + self._tpc = None + + def _check_tpc(self) -> None: + """Raise NotSupportedError if TPC is not supported.""" + # TPC supported on every supported PostgreSQL version. + pass + + +class Connection(BaseConnection[Row]): + """ + Wrapper for a connection to the database. 
+ """ + + __module__ = "psycopg" + + cursor_factory: Type[Cursor[Row]] + server_cursor_factory: Type[ServerCursor[Row]] + row_factory: RowFactory[Row] + _pipeline: Optional[Pipeline] + _Self = TypeVar("_Self", bound="Connection[Any]") + + def __init__( + self, + pgconn: "PGconn", + row_factory: RowFactory[Row] = cast(RowFactory[Row], tuple_row), + ): + super().__init__(pgconn) + self.row_factory = row_factory + self.lock = threading.Lock() + self.cursor_factory = Cursor + self.server_cursor_factory = ServerCursor + + @overload + @classmethod + def connect( + cls, + conninfo: str = "", + *, + autocommit: bool = False, + row_factory: RowFactory[Row], + prepare_threshold: Optional[int] = 5, + cursor_factory: Optional[Type[Cursor[Row]]] = None, + context: Optional[AdaptContext] = None, + **kwargs: Union[None, int, str], + ) -> "Connection[Row]": + # TODO: returned type should be _Self. See #308. + ... + + @overload + @classmethod + def connect( + cls, + conninfo: str = "", + *, + autocommit: bool = False, + prepare_threshold: Optional[int] = 5, + cursor_factory: Optional[Type[Cursor[Any]]] = None, + context: Optional[AdaptContext] = None, + **kwargs: Union[None, int, str], + ) -> "Connection[TupleRow]": + ... + + @classmethod # type: ignore[misc] # https://github.com/python/mypy/issues/11004 + def connect( + cls, + conninfo: str = "", + *, + autocommit: bool = False, + prepare_threshold: Optional[int] = 5, + row_factory: Optional[RowFactory[Row]] = None, + cursor_factory: Optional[Type[Cursor[Row]]] = None, + context: Optional[AdaptContext] = None, + **kwargs: Any, + ) -> "Connection[Any]": + """ + Connect to a database server and return a new `Connection` instance. 
+ """ + params = cls._get_connection_params(conninfo, **kwargs) + conninfo = make_conninfo(**params) + + try: + rv = cls._wait_conn( + cls._connect_gen(conninfo, autocommit=autocommit), + timeout=params["connect_timeout"], + ) + except e._NO_TRACEBACK as ex: + raise ex.with_traceback(None) + + if row_factory: + rv.row_factory = row_factory + if cursor_factory: + rv.cursor_factory = cursor_factory + if context: + rv._adapters = AdaptersMap(context.adapters) + rv.prepare_threshold = prepare_threshold + return rv + + def __enter__(self: _Self) -> _Self: + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + if self.closed: + return + + if exc_type: + # try to rollback, but if there are problems (connection in a bad + # state) just warn without clobbering the exception bubbling up. + try: + self.rollback() + except Exception as exc2: + logger.warning( + "error ignored in rollback on %s: %s", + self, + exc2, + ) + else: + self.commit() + + # Close the connection only if it doesn't belong to a pool. + if not getattr(self, "_pool", None): + self.close() + + @classmethod + def _get_connection_params(cls, conninfo: str, **kwargs: Any) -> Dict[str, Any]: + """Manipulate connection parameters before connecting. + + :param conninfo: Connection string as received by `~Connection.connect()`. + :param kwargs: Overriding connection arguments as received by `!connect()`. + :return: Connection arguments merged and eventually modified, in a + format similar to `~conninfo.conninfo_to_dict()`. 
+ """ + params = conninfo_to_dict(conninfo, **kwargs) + + # Make sure there is an usable connect_timeout + if "connect_timeout" in params: + params["connect_timeout"] = int(params["connect_timeout"]) + else: + params["connect_timeout"] = None + + return params + + def close(self) -> None: + """Close the database connection.""" + if self.closed: + return + self._closed = True + self.pgconn.finish() + + @overload + def cursor(self, *, binary: bool = False) -> Cursor[Row]: + ... + + @overload + def cursor( + self, *, binary: bool = False, row_factory: RowFactory[CursorRow] + ) -> Cursor[CursorRow]: + ... + + @overload + def cursor( + self, + name: str, + *, + binary: bool = False, + scrollable: Optional[bool] = None, + withhold: bool = False, + ) -> ServerCursor[Row]: + ... + + @overload + def cursor( + self, + name: str, + *, + binary: bool = False, + row_factory: RowFactory[CursorRow], + scrollable: Optional[bool] = None, + withhold: bool = False, + ) -> ServerCursor[CursorRow]: + ... + + def cursor( + self, + name: str = "", + *, + binary: bool = False, + row_factory: Optional[RowFactory[Any]] = None, + scrollable: Optional[bool] = None, + withhold: bool = False, + ) -> Union[Cursor[Any], ServerCursor[Any]]: + """ + Return a new cursor to send commands and queries to the connection. 
+ """ + self._check_connection_ok() + + if not row_factory: + row_factory = self.row_factory + + cur: Union[Cursor[Any], ServerCursor[Any]] + if name: + cur = self.server_cursor_factory( + self, + name=name, + row_factory=row_factory, + scrollable=scrollable, + withhold=withhold, + ) + else: + cur = self.cursor_factory(self, row_factory=row_factory) + + if binary: + cur.format = BINARY + + return cur + + def execute( + self, + query: Query, + params: Optional[Params] = None, + *, + prepare: Optional[bool] = None, + binary: bool = False, + ) -> Cursor[Row]: + """Execute a query and return a cursor to read its results.""" + try: + cur = self.cursor() + if binary: + cur.format = BINARY + + return cur.execute(query, params, prepare=prepare) + + except e._NO_TRACEBACK as ex: + raise ex.with_traceback(None) + + def commit(self) -> None: + """Commit any pending transaction to the database.""" + with self.lock: + self.wait(self._commit_gen()) + + def rollback(self) -> None: + """Roll back to the start of any pending transaction.""" + with self.lock: + self.wait(self._rollback_gen()) + + @contextmanager + def transaction( + self, + savepoint_name: Optional[str] = None, + force_rollback: bool = False, + ) -> Iterator[Transaction]: + """ + Start a context block with a new transaction or nested transaction. + + :param savepoint_name: Name of the savepoint used to manage a nested + transaction. If `!None`, one will be chosen automatically. + :param force_rollback: Roll back the transaction at the end of the + block even if there were no error (e.g. to try a no-op process). + :rtype: Transaction + """ + tx = Transaction(self, savepoint_name, force_rollback) + if self._pipeline: + with self.pipeline(), tx, self.pipeline(): + yield tx + else: + with tx: + yield tx + + def notifies(self) -> Generator[Notify, None, None]: + """ + Yield `Notify` objects as soon as they are received from the database. 
+ """ + while True: + with self.lock: + try: + ns = self.wait(notifies(self.pgconn)) + except e._NO_TRACEBACK as ex: + raise ex.with_traceback(None) + enc = pgconn_encoding(self.pgconn) + for pgn in ns: + n = Notify(pgn.relname.decode(enc), pgn.extra.decode(enc), pgn.be_pid) + yield n + + @contextmanager + def pipeline(self) -> Iterator[Pipeline]: + """Switch the connection into pipeline mode.""" + with self.lock: + self._check_connection_ok() + + pipeline = self._pipeline + if pipeline is None: + # WARNING: reference loop, broken ahead. + pipeline = self._pipeline = Pipeline(self) + + try: + with pipeline: + yield pipeline + finally: + if pipeline.level == 0: + with self.lock: + assert pipeline is self._pipeline + self._pipeline = None + + def wait(self, gen: PQGen[RV], timeout: Optional[float] = 0.1) -> RV: + """ + Consume a generator operating on the connection. + + The function must be used on generators that don't change connection + fd (i.e. not on connect and reset). + """ + try: + return waiting.wait(gen, self.pgconn.socket, timeout=timeout) + except KeyboardInterrupt: + # On Ctrl-C, try to cancel the query in the server, otherwise + # the connection will remain stuck in ACTIVE state. 
+ c = self.pgconn.get_cancel() + c.cancel() + try: + waiting.wait(gen, self.pgconn.socket, timeout=timeout) + except e.QueryCanceled: + pass # as expected + raise + + @classmethod + def _wait_conn(cls, gen: PQGenConn[RV], timeout: Optional[int]) -> RV: + """Consume a connection generator.""" + return waiting.wait_conn(gen, timeout=timeout) + + def _set_autocommit(self, value: bool) -> None: + with self.lock: + self.wait(self._set_autocommit_gen(value)) + + def _set_isolation_level(self, value: Optional[IsolationLevel]) -> None: + with self.lock: + self.wait(self._set_isolation_level_gen(value)) + + def _set_read_only(self, value: Optional[bool]) -> None: + with self.lock: + self.wait(self._set_read_only_gen(value)) + + def _set_deferrable(self, value: Optional[bool]) -> None: + with self.lock: + self.wait(self._set_deferrable_gen(value)) + + def tpc_begin(self, xid: Union[Xid, str]) -> None: + """ + Begin a TPC transaction with the given transaction ID `!xid`. + """ + with self.lock: + self.wait(self._tpc_begin_gen(xid)) + + def tpc_prepare(self) -> None: + """ + Perform the first phase of a transaction started with `tpc_begin()`. + """ + try: + with self.lock: + self.wait(self._tpc_prepare_gen()) + except e.ObjectNotInPrerequisiteState as ex: + raise e.NotSupportedError(str(ex)) from None + + def tpc_commit(self, xid: Union[Xid, str, None] = None) -> None: + """ + Commit a prepared two-phase transaction. + """ + with self.lock: + self.wait(self._tpc_finish_gen("COMMIT", xid)) + + def tpc_rollback(self, xid: Union[Xid, str, None] = None) -> None: + """ + Roll back a prepared two-phase transaction. 
+ """ + with self.lock: + self.wait(self._tpc_finish_gen("ROLLBACK", xid)) + + def tpc_recover(self) -> List[Xid]: + self._check_tpc() + status = self.info.transaction_status + with self.cursor(row_factory=args_row(Xid._from_record)) as cur: + cur.execute(Xid._get_recover_query()) + res = cur.fetchall() + + if status == IDLE and self.info.transaction_status == INTRANS: + self.rollback() + + return res diff --git a/lib/python3.11/site-packages/psycopg/connection_async.py b/lib/python3.11/site-packages/psycopg/connection_async.py new file mode 100644 index 0000000..2490480 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/connection_async.py @@ -0,0 +1,431 @@ +""" +psycopg async connection objects +""" + +# Copyright (C) 2020 The Psycopg Team + +import sys +import asyncio +import logging +from types import TracebackType +from typing import Any, AsyncGenerator, AsyncIterator, Dict, List, Optional +from typing import Type, TypeVar, Union, cast, overload, TYPE_CHECKING +from contextlib import asynccontextmanager + +from . import pq +from . import errors as e +from . 
import waiting +from .abc import AdaptContext, Params, PQGen, PQGenConn, Query, RV +from ._tpc import Xid +from .rows import Row, AsyncRowFactory, tuple_row, TupleRow, args_row +from .adapt import AdaptersMap +from ._enums import IsolationLevel +from .conninfo import make_conninfo, conninfo_to_dict, resolve_hostaddr_async +from ._pipeline import AsyncPipeline +from ._encodings import pgconn_encoding +from .connection import BaseConnection, CursorRow, Notify +from .generators import notifies +from .transaction import AsyncTransaction +from .cursor_async import AsyncCursor +from .server_cursor import AsyncServerCursor + +if TYPE_CHECKING: + from .pq.abc import PGconn + +TEXT = pq.Format.TEXT +BINARY = pq.Format.BINARY + +IDLE = pq.TransactionStatus.IDLE +INTRANS = pq.TransactionStatus.INTRANS + +logger = logging.getLogger("psycopg") + + +class AsyncConnection(BaseConnection[Row]): + """ + Asynchronous wrapper for a connection to the database. + """ + + __module__ = "psycopg" + + cursor_factory: Type[AsyncCursor[Row]] + server_cursor_factory: Type[AsyncServerCursor[Row]] + row_factory: AsyncRowFactory[Row] + _pipeline: Optional[AsyncPipeline] + _Self = TypeVar("_Self", bound="AsyncConnection[Any]") + + def __init__( + self, + pgconn: "PGconn", + row_factory: AsyncRowFactory[Row] = cast(AsyncRowFactory[Row], tuple_row), + ): + super().__init__(pgconn) + self.row_factory = row_factory + self.lock = asyncio.Lock() + self.cursor_factory = AsyncCursor + self.server_cursor_factory = AsyncServerCursor + + @overload + @classmethod + async def connect( + cls, + conninfo: str = "", + *, + autocommit: bool = False, + prepare_threshold: Optional[int] = 5, + row_factory: AsyncRowFactory[Row], + cursor_factory: Optional[Type[AsyncCursor[Row]]] = None, + context: Optional[AdaptContext] = None, + **kwargs: Union[None, int, str], + ) -> "AsyncConnection[Row]": + # TODO: returned type should be _Self. See #308. + ... 
+ + @overload + @classmethod + async def connect( + cls, + conninfo: str = "", + *, + autocommit: bool = False, + prepare_threshold: Optional[int] = 5, + cursor_factory: Optional[Type[AsyncCursor[Any]]] = None, + context: Optional[AdaptContext] = None, + **kwargs: Union[None, int, str], + ) -> "AsyncConnection[TupleRow]": + ... + + @classmethod # type: ignore[misc] # https://github.com/python/mypy/issues/11004 + async def connect( + cls, + conninfo: str = "", + *, + autocommit: bool = False, + prepare_threshold: Optional[int] = 5, + context: Optional[AdaptContext] = None, + row_factory: Optional[AsyncRowFactory[Row]] = None, + cursor_factory: Optional[Type[AsyncCursor[Row]]] = None, + **kwargs: Any, + ) -> "AsyncConnection[Any]": + if sys.platform == "win32": + loop = asyncio.get_running_loop() + if isinstance(loop, asyncio.ProactorEventLoop): + raise e.InterfaceError( + "Psycopg cannot use the 'ProactorEventLoop' to run in async" + " mode. Please use a compatible event loop, for instance by" + " setting 'asyncio.set_event_loop_policy" + "(WindowsSelectorEventLoopPolicy())'" + ) + + params = await cls._get_connection_params(conninfo, **kwargs) + conninfo = make_conninfo(**params) + + try: + rv = await cls._wait_conn( + cls._connect_gen(conninfo, autocommit=autocommit), + timeout=params["connect_timeout"], + ) + except e._NO_TRACEBACK as ex: + raise ex.with_traceback(None) + + if row_factory: + rv.row_factory = row_factory + if cursor_factory: + rv.cursor_factory = cursor_factory + if context: + rv._adapters = AdaptersMap(context.adapters) + rv.prepare_threshold = prepare_threshold + return rv + + async def __aenter__(self: _Self) -> _Self: + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + if self.closed: + return + + if exc_type: + # try to rollback, but if there are problems (connection in a bad + # state) just warn without clobbering the 
exception bubbling up. + try: + await self.rollback() + except Exception as exc2: + logger.warning( + "error ignored in rollback on %s: %s", + self, + exc2, + ) + else: + await self.commit() + + # Close the connection only if it doesn't belong to a pool. + if not getattr(self, "_pool", None): + await self.close() + + @classmethod + async def _get_connection_params( + cls, conninfo: str, **kwargs: Any + ) -> Dict[str, Any]: + """Manipulate connection parameters before connecting. + + .. versionchanged:: 3.1 + Unlike the sync counterpart, perform non-blocking address + resolution and populate the ``hostaddr`` connection parameter, + unless the user has provided one themselves. See + `~psycopg._dns.resolve_hostaddr_async()` for details. + + """ + params = conninfo_to_dict(conninfo, **kwargs) + + # Make sure there is an usable connect_timeout + if "connect_timeout" in params: + params["connect_timeout"] = int(params["connect_timeout"]) + else: + params["connect_timeout"] = None + + # Resolve host addresses in non-blocking way + params = await resolve_hostaddr_async(params) + + return params + + async def close(self) -> None: + if self.closed: + return + self._closed = True + self.pgconn.finish() + + @overload + def cursor(self, *, binary: bool = False) -> AsyncCursor[Row]: + ... + + @overload + def cursor( + self, *, binary: bool = False, row_factory: AsyncRowFactory[CursorRow] + ) -> AsyncCursor[CursorRow]: + ... + + @overload + def cursor( + self, + name: str, + *, + binary: bool = False, + scrollable: Optional[bool] = None, + withhold: bool = False, + ) -> AsyncServerCursor[Row]: + ... + + @overload + def cursor( + self, + name: str, + *, + binary: bool = False, + row_factory: AsyncRowFactory[CursorRow], + scrollable: Optional[bool] = None, + withhold: bool = False, + ) -> AsyncServerCursor[CursorRow]: + ... 
+ + def cursor( + self, + name: str = "", + *, + binary: bool = False, + row_factory: Optional[AsyncRowFactory[Any]] = None, + scrollable: Optional[bool] = None, + withhold: bool = False, + ) -> Union[AsyncCursor[Any], AsyncServerCursor[Any]]: + """ + Return a new `AsyncCursor` to send commands and queries to the connection. + """ + self._check_connection_ok() + + if not row_factory: + row_factory = self.row_factory + + cur: Union[AsyncCursor[Any], AsyncServerCursor[Any]] + if name: + cur = self.server_cursor_factory( + self, + name=name, + row_factory=row_factory, + scrollable=scrollable, + withhold=withhold, + ) + else: + cur = self.cursor_factory(self, row_factory=row_factory) + + if binary: + cur.format = BINARY + + return cur + + async def execute( + self, + query: Query, + params: Optional[Params] = None, + *, + prepare: Optional[bool] = None, + binary: bool = False, + ) -> AsyncCursor[Row]: + try: + cur = self.cursor() + if binary: + cur.format = BINARY + + return await cur.execute(query, params, prepare=prepare) + + except e._NO_TRACEBACK as ex: + raise ex.with_traceback(None) + + async def commit(self) -> None: + async with self.lock: + await self.wait(self._commit_gen()) + + async def rollback(self) -> None: + async with self.lock: + await self.wait(self._rollback_gen()) + + @asynccontextmanager + async def transaction( + self, + savepoint_name: Optional[str] = None, + force_rollback: bool = False, + ) -> AsyncIterator[AsyncTransaction]: + """ + Start a context block with a new transaction or nested transaction. 
+ + :rtype: AsyncTransaction + """ + tx = AsyncTransaction(self, savepoint_name, force_rollback) + if self._pipeline: + async with self.pipeline(), tx, self.pipeline(): + yield tx + else: + async with tx: + yield tx + + async def notifies(self) -> AsyncGenerator[Notify, None]: + while True: + async with self.lock: + try: + ns = await self.wait(notifies(self.pgconn)) + except e._NO_TRACEBACK as ex: + raise ex.with_traceback(None) + enc = pgconn_encoding(self.pgconn) + for pgn in ns: + n = Notify(pgn.relname.decode(enc), pgn.extra.decode(enc), pgn.be_pid) + yield n + + @asynccontextmanager + async def pipeline(self) -> AsyncIterator[AsyncPipeline]: + """Context manager to switch the connection into pipeline mode.""" + async with self.lock: + self._check_connection_ok() + + pipeline = self._pipeline + if pipeline is None: + # WARNING: reference loop, broken ahead. + pipeline = self._pipeline = AsyncPipeline(self) + + try: + async with pipeline: + yield pipeline + finally: + if pipeline.level == 0: + async with self.lock: + assert pipeline is self._pipeline + self._pipeline = None + + async def wait(self, gen: PQGen[RV]) -> RV: + try: + return await waiting.wait_async(gen, self.pgconn.socket) + except (asyncio.CancelledError, KeyboardInterrupt): + # On Ctrl-C, try to cancel the query in the server, otherwise + # the connection will remain stuck in ACTIVE state. 
+ c = self.pgconn.get_cancel() + c.cancel() + try: + await waiting.wait_async(gen, self.pgconn.socket) + except e.QueryCanceled: + pass # as expected + raise + + @classmethod + async def _wait_conn(cls, gen: PQGenConn[RV], timeout: Optional[int]) -> RV: + return await waiting.wait_conn_async(gen, timeout) + + def _set_autocommit(self, value: bool) -> None: + self._no_set_async("autocommit") + + async def set_autocommit(self, value: bool) -> None: + """Async version of the `~Connection.autocommit` setter.""" + async with self.lock: + await self.wait(self._set_autocommit_gen(value)) + + def _set_isolation_level(self, value: Optional[IsolationLevel]) -> None: + self._no_set_async("isolation_level") + + async def set_isolation_level(self, value: Optional[IsolationLevel]) -> None: + """Async version of the `~Connection.isolation_level` setter.""" + async with self.lock: + await self.wait(self._set_isolation_level_gen(value)) + + def _set_read_only(self, value: Optional[bool]) -> None: + self._no_set_async("read_only") + + async def set_read_only(self, value: Optional[bool]) -> None: + """Async version of the `~Connection.read_only` setter.""" + async with self.lock: + await self.wait(self._set_read_only_gen(value)) + + def _set_deferrable(self, value: Optional[bool]) -> None: + self._no_set_async("deferrable") + + async def set_deferrable(self, value: Optional[bool]) -> None: + """Async version of the `~Connection.deferrable` setter.""" + async with self.lock: + await self.wait(self._set_deferrable_gen(value)) + + def _no_set_async(self, attribute: str) -> None: + raise AttributeError( + f"'the {attribute!r} property is read-only on async connections:" + f" please use 'await .set_{attribute}()' instead." 
+ ) + + async def tpc_begin(self, xid: Union[Xid, str]) -> None: + async with self.lock: + await self.wait(self._tpc_begin_gen(xid)) + + async def tpc_prepare(self) -> None: + try: + async with self.lock: + await self.wait(self._tpc_prepare_gen()) + except e.ObjectNotInPrerequisiteState as ex: + raise e.NotSupportedError(str(ex)) from None + + async def tpc_commit(self, xid: Union[Xid, str, None] = None) -> None: + async with self.lock: + await self.wait(self._tpc_finish_gen("commit", xid)) + + async def tpc_rollback(self, xid: Union[Xid, str, None] = None) -> None: + async with self.lock: + await self.wait(self._tpc_finish_gen("rollback", xid)) + + async def tpc_recover(self) -> List[Xid]: + self._check_tpc() + status = self.info.transaction_status + async with self.cursor(row_factory=args_row(Xid._from_record)) as cur: + await cur.execute(Xid._get_recover_query()) + res = await cur.fetchall() + + if status == IDLE and self.info.transaction_status == INTRANS: + await self.rollback() + + return res diff --git a/lib/python3.11/site-packages/psycopg/conninfo.py b/lib/python3.11/site-packages/psycopg/conninfo.py new file mode 100644 index 0000000..3b21f83 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/conninfo.py @@ -0,0 +1,378 @@ +""" +Functions to manipulate conninfo strings +""" + +# Copyright (C) 2020 The Psycopg Team + +import os +import re +import socket +import asyncio +from typing import Any, Dict, List, Optional +from pathlib import Path +from datetime import tzinfo +from functools import lru_cache +from ipaddress import ip_address + +from . import pq +from . import errors as e +from ._tz import get_tzinfo +from ._encodings import pgconn_encoding + + +def make_conninfo(conninfo: str = "", **kwargs: Any) -> str: + """ + Merge a string and keyword params into a single conninfo string. + + :param conninfo: A `connection string`__ as accepted by PostgreSQL. + :param kwargs: Parameters overriding the ones specified in `!conninfo`. 
+ :return: A connection string valid for PostgreSQL, with the `!kwargs` + parameters merged. + + Raise `~psycopg.ProgrammingError` if the input doesn't make a valid + conninfo string. + + .. __: https://www.postgresql.org/docs/current/libpq-connect.html + #LIBPQ-CONNSTRING + """ + if not conninfo and not kwargs: + return "" + + # If no kwarg specified don't mung the conninfo but check if it's correct. + # Make sure to return a string, not a subtype, to avoid making Liskov sad. + if not kwargs: + _parse_conninfo(conninfo) + return str(conninfo) + + # Override the conninfo with the parameters + # Drop the None arguments + kwargs = {k: v for (k, v) in kwargs.items() if v is not None} + + if conninfo: + tmp = conninfo_to_dict(conninfo) + tmp.update(kwargs) + kwargs = tmp + + conninfo = " ".join(f"{k}={_param_escape(str(v))}" for (k, v) in kwargs.items()) + + # Verify the result is valid + _parse_conninfo(conninfo) + + return conninfo + + +def conninfo_to_dict(conninfo: str = "", **kwargs: Any) -> Dict[str, Any]: + """ + Convert the `!conninfo` string into a dictionary of parameters. + + :param conninfo: A `connection string`__ as accepted by PostgreSQL. + :param kwargs: Parameters overriding the ones specified in `!conninfo`. + :return: Dictionary with the parameters parsed from `!conninfo` and + `!kwargs`. + + Raise `~psycopg.ProgrammingError` if `!conninfo` is not a a valid connection + string. + + .. __: https://www.postgresql.org/docs/current/libpq-connect.html + #LIBPQ-CONNSTRING + """ + opts = _parse_conninfo(conninfo) + rv = {opt.keyword.decode(): opt.val.decode() for opt in opts if opt.val is not None} + for k, v in kwargs.items(): + if v is not None: + rv[k] = v + return rv + + +def _parse_conninfo(conninfo: str) -> List[pq.ConninfoOption]: + """ + Verify that `!conninfo` is a valid connection string. + + Raise ProgrammingError if the string is not valid. + + Return the result of pq.Conninfo.parse() on success. 
+ """ + try: + return pq.Conninfo.parse(conninfo.encode()) + except e.OperationalError as ex: + raise e.ProgrammingError(str(ex)) + + +re_escape = re.compile(r"([\\'])") +re_space = re.compile(r"\s") + + +def _param_escape(s: str) -> str: + """ + Apply the escaping rule required by PQconnectdb + """ + if not s: + return "''" + + s = re_escape.sub(r"\\\1", s) + if re_space.search(s): + s = "'" + s + "'" + + return s + + +class ConnectionInfo: + """Allow access to information about the connection.""" + + __module__ = "psycopg" + + def __init__(self, pgconn: pq.abc.PGconn): + self.pgconn = pgconn + + @property + def vendor(self) -> str: + """A string representing the database vendor connected to.""" + return "PostgreSQL" + + @property + def host(self) -> str: + """The server host name of the active connection. See :pq:`PQhost()`.""" + return self._get_pgconn_attr("host") + + @property + def hostaddr(self) -> str: + """The server IP address of the connection. See :pq:`PQhostaddr()`.""" + return self._get_pgconn_attr("hostaddr") + + @property + def port(self) -> int: + """The port of the active connection. See :pq:`PQport()`.""" + return int(self._get_pgconn_attr("port")) + + @property + def dbname(self) -> str: + """The database name of the connection. See :pq:`PQdb()`.""" + return self._get_pgconn_attr("db") + + @property + def user(self) -> str: + """The user name of the connection. See :pq:`PQuser()`.""" + return self._get_pgconn_attr("user") + + @property + def password(self) -> str: + """The password of the connection. See :pq:`PQpass()`.""" + return self._get_pgconn_attr("password") + + @property + def options(self) -> str: + """ + The command-line options passed in the connection request. + See :pq:`PQoptions`. + """ + return self._get_pgconn_attr("options") + + def get_parameters(self) -> Dict[str, str]: + """Return the connection parameters values. 
+ + Return all the parameters set to a non-default value, which might come + either from the connection string and parameters passed to + `~Connection.connect()` or from environment variables. The password + is never returned (you can read it using the `password` attribute). + """ + pyenc = self.encoding + + # Get the known defaults to avoid reporting them + defaults = { + i.keyword: i.compiled + for i in pq.Conninfo.get_defaults() + if i.compiled is not None + } + # Not returned by the libq. Bug? Bet we're using SSH. + defaults.setdefault(b"channel_binding", b"prefer") + defaults[b"passfile"] = str(Path.home() / ".pgpass").encode() + + return { + i.keyword.decode(pyenc): i.val.decode(pyenc) + for i in self.pgconn.info + if i.val is not None + and i.keyword != b"password" + and i.val != defaults.get(i.keyword) + } + + @property + def dsn(self) -> str: + """Return the connection string to connect to the database. + + The string contains all the parameters set to a non-default value, + which might come either from the connection string and parameters + passed to `~Connection.connect()` or from environment variables. The + password is never returned (you can read it using the `password` + attribute). + """ + return make_conninfo(**self.get_parameters()) + + @property + def status(self) -> pq.ConnStatus: + """The status of the connection. See :pq:`PQstatus()`.""" + return pq.ConnStatus(self.pgconn.status) + + @property + def transaction_status(self) -> pq.TransactionStatus: + """ + The current in-transaction status of the session. + See :pq:`PQtransactionStatus()`. + """ + return pq.TransactionStatus(self.pgconn.transaction_status) + + @property + def pipeline_status(self) -> pq.PipelineStatus: + """ + The current pipeline status of the client. + See :pq:`PQpipelineStatus()`. + """ + return pq.PipelineStatus(self.pgconn.pipeline_status) + + def parameter_status(self, param_name: str) -> Optional[str]: + """ + Return a parameter setting of the connection. 
+ + Return `None` is the parameter is unknown. + """ + res = self.pgconn.parameter_status(param_name.encode(self.encoding)) + return res.decode(self.encoding) if res is not None else None + + @property + def server_version(self) -> int: + """ + An integer representing the server version. See :pq:`PQserverVersion()`. + """ + return self.pgconn.server_version + + @property + def backend_pid(self) -> int: + """ + The process ID (PID) of the backend process handling this connection. + See :pq:`PQbackendPID()`. + """ + return self.pgconn.backend_pid + + @property + def error_message(self) -> str: + """ + The error message most recently generated by an operation on the connection. + See :pq:`PQerrorMessage()`. + """ + return self._get_pgconn_attr("error_message") + + @property + def timezone(self) -> tzinfo: + """The Python timezone info of the connection's timezone.""" + return get_tzinfo(self.pgconn) + + @property + def encoding(self) -> str: + """The Python codec name of the connection's client encoding.""" + return pgconn_encoding(self.pgconn) + + def _get_pgconn_attr(self, name: str) -> str: + value: bytes = getattr(self.pgconn, name) + return value.decode(self.encoding) + + +async def resolve_hostaddr_async(params: Dict[str, Any]) -> Dict[str, Any]: + """ + Perform async DNS lookup of the hosts and return a new params dict. + + :param params: The input parameters, for instance as returned by + `~psycopg.conninfo.conninfo_to_dict()`. + + If a ``host`` param is present but not ``hostname``, resolve the host + addresses dynamically. + + The function may change the input ``host``, ``hostname``, ``port`` to allow + connecting without further DNS lookups, eventually removing hosts that are + not resolved, keeping the lists of hosts and ports consistent. + + Raise `~psycopg.OperationalError` if connection is not possible (e.g. no + host resolve, inconsistent lists length). 
+ """ + hostaddr_arg = params.get("hostaddr", os.environ.get("PGHOSTADDR", "")) + if hostaddr_arg: + # Already resolved + return params + + host_arg: str = params.get("host", os.environ.get("PGHOST", "")) + if not host_arg: + # Nothing to resolve + return params + + hosts_in = host_arg.split(",") + port_arg: str = str(params.get("port", os.environ.get("PGPORT", ""))) + ports_in = port_arg.split(",") if port_arg else [] + default_port = "5432" + + if len(ports_in) == 1: + # If only one port is specified, the libpq will apply it to all + # the hosts, so don't mangle it. + default_port = ports_in.pop() + + elif len(ports_in) > 1: + if len(ports_in) != len(hosts_in): + # ProgrammingError would have been more appropriate, but this is + # what the raise if the libpq fails connect in the same case. + raise e.OperationalError( + f"cannot match {len(hosts_in)} hosts with {len(ports_in)} port numbers" + ) + ports_out = [] + + hosts_out = [] + hostaddr_out = [] + loop = asyncio.get_running_loop() + for i, host in enumerate(hosts_in): + if not host or host.startswith("/") or host[1:2] == ":": + # Local path + hosts_out.append(host) + hostaddr_out.append("") + if ports_in: + ports_out.append(ports_in[i]) + continue + + # If the host is already an ip address don't try to resolve it + if is_ip_address(host): + hosts_out.append(host) + hostaddr_out.append(host) + if ports_in: + ports_out.append(ports_in[i]) + continue + + try: + port = ports_in[i] if ports_in else default_port + ans = await loop.getaddrinfo( + host, port, proto=socket.IPPROTO_TCP, type=socket.SOCK_STREAM + ) + except OSError as ex: + last_exc = ex + else: + for item in ans: + hosts_out.append(host) + hostaddr_out.append(item[4][0]) + if ports_in: + ports_out.append(ports_in[i]) + + # Throw an exception if no host could be resolved + if not hosts_out: + raise e.OperationalError(str(last_exc)) + + out = params.copy() + out["host"] = ",".join(hosts_out) + out["hostaddr"] = ",".join(hostaddr_out) + if ports_in: + 
out["port"] = ",".join(ports_out) + + return out + + +@lru_cache() +def is_ip_address(s: str) -> bool: + """Return True if the string represent a valid ip address.""" + try: + ip_address(s) + except ValueError: + return False + return True diff --git a/lib/python3.11/site-packages/psycopg/copy.py b/lib/python3.11/site-packages/psycopg/copy.py new file mode 100644 index 0000000..26a2d9e --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/copy.py @@ -0,0 +1,902 @@ +""" +psycopg copy support +""" + +# Copyright (C) 2020 The Psycopg Team + +import re +import queue +import struct +import asyncio +import threading +from abc import ABC, abstractmethod +from types import TracebackType +from typing import Any, AsyncIterator, Dict, Generic, Iterator, List, Match, IO +from typing import Optional, Sequence, Tuple, Type, TypeVar, Union, TYPE_CHECKING + +from . import pq +from . import adapt +from . import errors as e +from .abc import Buffer, ConnectionType, PQGen, Transformer +from ._compat import create_task +from ._cmodule import _psycopg +from ._encodings import pgconn_encoding +from .generators import copy_from, copy_to, copy_end + +if TYPE_CHECKING: + from .cursor import BaseCursor, Cursor + from .cursor_async import AsyncCursor + from .connection import Connection # noqa: F401 + from .connection_async import AsyncConnection # noqa: F401 + +PY_TEXT = adapt.PyFormat.TEXT +PY_BINARY = adapt.PyFormat.BINARY + +TEXT = pq.Format.TEXT +BINARY = pq.Format.BINARY + +COPY_IN = pq.ExecStatus.COPY_IN +COPY_OUT = pq.ExecStatus.COPY_OUT + +ACTIVE = pq.TransactionStatus.ACTIVE + +# Size of data to accumulate before sending it down the network. We fill a +# buffer this size field by field, and when it passes the threshold size +# we ship it, so it may end up being bigger than this. +BUFFER_SIZE = 32 * 1024 + +# Maximum data size we want to queue to send to the libpq copy. 
Sending a +# buffer too big to be handled can cause an infinite loop in the libpq +# (#255) so we want to split it in more digestable chunks. +MAX_BUFFER_SIZE = 4 * BUFFER_SIZE +# Note: making this buffer too large, e.g. +# MAX_BUFFER_SIZE = 1024 * 1024 +# makes operations *way* slower! Probably triggering some quadraticity +# in the libpq memory management and data sending. + +# Max size of the write queue of buffers. More than that copy will block +# Each buffer should be around BUFFER_SIZE size. +QUEUE_SIZE = 1024 + + +class BaseCopy(Generic[ConnectionType]): + """ + Base implementation for the copy user interface. + + Two subclasses expose real methods with the sync/async differences. + + The difference between the text and binary format is managed by two + different `Formatter` subclasses. + + Writing (the I/O part) is implemented in the subclasses by a `Writer` or + `AsyncWriter` instance. Normally writing implies sending copy data to a + database, but a different writer might be chosen, e.g. to stream data into + a file for later use. 
+ """ + + _Self = TypeVar("_Self", bound="BaseCopy[Any]") + + formatter: "Formatter" + + def __init__( + self, + cursor: "BaseCursor[ConnectionType, Any]", + *, + binary: Optional[bool] = None, + ): + self.cursor = cursor + self.connection = cursor.connection + self._pgconn = self.connection.pgconn + + result = cursor.pgresult + if result: + self._direction = result.status + if self._direction != COPY_IN and self._direction != COPY_OUT: + raise e.ProgrammingError( + "the cursor should have performed a COPY operation;" + f" its status is {pq.ExecStatus(self._direction).name} instead" + ) + else: + self._direction = COPY_IN + + if binary is None: + binary = bool(result and result.binary_tuples) + + tx: Transformer = getattr(cursor, "_tx", None) or adapt.Transformer(cursor) + if binary: + self.formatter = BinaryFormatter(tx) + else: + self.formatter = TextFormatter(tx, encoding=pgconn_encoding(self._pgconn)) + + self._finished = False + + def __repr__(self) -> str: + cls = f"{self.__class__.__module__}.{self.__class__.__qualname__}" + info = pq.misc.connection_summary(self._pgconn) + return f"<{cls} {info} at 0x{id(self):x}>" + + def _enter(self) -> None: + if self._finished: + raise TypeError("copy blocks can be used only once") + + def set_types(self, types: Sequence[Union[int, str]]) -> None: + """ + Set the types expected in a COPY operation. + + The types must be specified as a sequence of oid or PostgreSQL type + names (e.g. ``int4``, ``timestamptz[]``). + + This operation overcomes the lack of metadata returned by PostgreSQL + when a COPY operation begins: + + - On :sql:`COPY TO`, `!set_types()` allows to specify what types the + operation returns. If `!set_types()` is not used, the data will be + returned as unparsed strings or bytes instead of Python objects. + + - On :sql:`COPY FROM`, `!set_types()` allows to choose what type the + database expects. This is especially useful in binary copy, because + PostgreSQL will apply no cast rule. 
+ + """ + registry = self.cursor.adapters.types + oids = [t if isinstance(t, int) else registry.get_oid(t) for t in types] + + if self._direction == COPY_IN: + self.formatter.transformer.set_dumper_types(oids, self.formatter.format) + else: + self.formatter.transformer.set_loader_types(oids, self.formatter.format) + + # High level copy protocol generators (state change of the Copy object) + + def _read_gen(self) -> PQGen[Buffer]: + if self._finished: + return memoryview(b"") + + res = yield from copy_from(self._pgconn) + if isinstance(res, memoryview): + return res + + # res is the final PGresult + self._finished = True + + # This result is a COMMAND_OK which has info about the number of rows + # returned, but not about the columns, which is instead an information + # that was received on the COPY_OUT result at the beginning of COPY. + # So, don't replace the results in the cursor, just update the rowcount. + nrows = res.command_tuples + self.cursor._rowcount = nrows if nrows is not None else -1 + return memoryview(b"") + + def _read_row_gen(self) -> PQGen[Optional[Tuple[Any, ...]]]: + data = yield from self._read_gen() + if not data: + return None + + row = self.formatter.parse_row(data) + if row is None: + # Get the final result to finish the copy operation + yield from self._read_gen() + self._finished = True + return None + + return row + + def _end_copy_out_gen(self, exc: Optional[BaseException]) -> PQGen[None]: + if not exc: + return + + if self._pgconn.transaction_status != ACTIVE: + # The server has already finished to send copy data. The connection + # is already in a good state. + return + + # Throw a cancel to the server, then consume the rest of the copy data + # (which might or might not have been already transferred entirely to + # the client, so we won't necessary see the exception associated with + # canceling). 
+ self.connection.cancel() + try: + while (yield from self._read_gen()): + pass + except e.QueryCanceled: + pass + + +class Copy(BaseCopy["Connection[Any]"]): + """Manage a :sql:`COPY` operation. + + :param cursor: the cursor where the operation is performed. + :param binary: if `!True`, write binary format. + :param writer: the object to write to destination. If not specified, write + to the `!cursor` connection. + + Choosing `!binary` is not necessary if the cursor has executed a + :sql:`COPY` operation, because the operation result describes the format + too. The parameter is useful when a `!Copy` object is created manually and + no operation is performed on the cursor, such as when using ``writer=``\\ + `~psycopg.copy.FileWriter`. + + """ + + __module__ = "psycopg" + + writer: "Writer" + + def __init__( + self, + cursor: "Cursor[Any]", + *, + binary: Optional[bool] = None, + writer: Optional["Writer"] = None, + ): + super().__init__(cursor, binary=binary) + if not writer: + writer = LibpqWriter(cursor) + + self.writer = writer + self._write = writer.write + + def __enter__(self: BaseCopy._Self) -> BaseCopy._Self: + self._enter() + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + self.finish(exc_val) + + # End user sync interface + + def __iter__(self) -> Iterator[Buffer]: + """Implement block-by-block iteration on :sql:`COPY TO`.""" + while True: + data = self.read() + if not data: + break + yield data + + def read(self) -> Buffer: + """ + Read an unparsed row after a :sql:`COPY TO` operation. + + Return an empty string when the data is finished. + """ + return self.connection.wait(self._read_gen()) + + def rows(self) -> Iterator[Tuple[Any, ...]]: + """ + Iterate on the result of a :sql:`COPY TO` operation record by record. 
+ + Note that the records returned will be tuples of unparsed strings or + bytes, unless data types are specified using `set_types()`. + """ + while True: + record = self.read_row() + if record is None: + break + yield record + + def read_row(self) -> Optional[Tuple[Any, ...]]: + """ + Read a parsed row of data from a table after a :sql:`COPY TO` operation. + + Return `!None` when the data is finished. + + Note that the records returned will be tuples of unparsed strings or + bytes, unless data types are specified using `set_types()`. + """ + return self.connection.wait(self._read_row_gen()) + + def write(self, buffer: Union[Buffer, str]) -> None: + """ + Write a block of data to a table after a :sql:`COPY FROM` operation. + + If the :sql:`COPY` is in binary format `!buffer` must be `!bytes`. In + text mode it can be either `!bytes` or `!str`. + """ + data = self.formatter.write(buffer) + if data: + self._write(data) + + def write_row(self, row: Sequence[Any]) -> None: + """Write a record to a table after a :sql:`COPY FROM` operation.""" + data = self.formatter.write_row(row) + if data: + self._write(data) + + def finish(self, exc: Optional[BaseException]) -> None: + """Terminate the copy operation and free the resources allocated. + + You shouldn't need to call this function yourself: it is usually called + by exit. It is available if, despite what is documented, you end up + using the `Copy` object outside a block. + """ + if self._direction == COPY_IN: + data = self.formatter.end() + if data: + self._write(data) + self.writer.finish(exc) + self._finished = True + else: + self.connection.wait(self._end_copy_out_gen(exc)) + + +class Writer(ABC): + """ + A class to write copy data somewhere. + """ + + @abstractmethod + def write(self, data: Buffer) -> None: + """ + Write some data to destination. + """ + ... + + def finish(self, exc: Optional[BaseException] = None) -> None: + """ + Called when write operations are finished. 
+ + If operations finished with an error, it will be passed to ``exc``. + """ + pass + + +class LibpqWriter(Writer): + """ + A `Writer` to write copy data to a Postgres database. + """ + + def __init__(self, cursor: "Cursor[Any]"): + self.cursor = cursor + self.connection = cursor.connection + self._pgconn = self.connection.pgconn + + def write(self, data: Buffer) -> None: + if len(data) <= MAX_BUFFER_SIZE: + # Most used path: we don't need to split the buffer in smaller + # bits, so don't make a copy. + self.connection.wait(copy_to(self._pgconn, data)) + else: + # Copy a buffer too large in chunks to avoid causing a memory + # error in the libpq, which may cause an infinite loop (#255). + for i in range(0, len(data), MAX_BUFFER_SIZE): + self.connection.wait( + copy_to(self._pgconn, data[i : i + MAX_BUFFER_SIZE]) + ) + + def finish(self, exc: Optional[BaseException] = None) -> None: + bmsg: Optional[bytes] + if exc: + msg = f"error from Python: {type(exc).__qualname__} - {exc}" + bmsg = msg.encode(pgconn_encoding(self._pgconn), "replace") + else: + bmsg = None + + res = self.connection.wait(copy_end(self._pgconn, bmsg)) + self.cursor._results = [res] + + +class QueuedLibpqDriver(LibpqWriter): + """ + A writer using a buffer to queue data to write to a Postgres database. + + `write()` returns immediately, so that the main thread can be CPU-bound + formatting messages, while a worker thread can be IO-bound waiting to write + on the connection. + """ + + def __init__(self, cursor: "Cursor[Any]"): + super().__init__(cursor) + + self._queue: queue.Queue[Buffer] = queue.Queue(maxsize=QUEUE_SIZE) + self._worker: Optional[threading.Thread] = None + self._worker_error: Optional[BaseException] = None + + def worker(self) -> None: + """Push data to the server when available from the copy queue. + + Terminate reading when the queue receives a false-y value, or in case + of error. + + The function is designed to be run in a separate thread. 
+ """ + try: + while True: + data = self._queue.get(block=True, timeout=24 * 60 * 60) + if not data: + break + self.connection.wait(copy_to(self._pgconn, data)) + except BaseException as ex: + # Propagate the error to the main thread. + self._worker_error = ex + + def write(self, data: Buffer) -> None: + if not self._worker: + # warning: reference loop, broken by _write_end + self._worker = threading.Thread(target=self.worker) + self._worker.daemon = True + self._worker.start() + + # If the worker thread raies an exception, re-raise it to the caller. + if self._worker_error: + raise self._worker_error + + if len(data) <= MAX_BUFFER_SIZE: + # Most used path: we don't need to split the buffer in smaller + # bits, so don't make a copy. + self._queue.put(data) + else: + # Copy a buffer too large in chunks to avoid causing a memory + # error in the libpq, which may cause an infinite loop (#255). + for i in range(0, len(data), MAX_BUFFER_SIZE): + self._queue.put(data[i : i + MAX_BUFFER_SIZE]) + + def finish(self, exc: Optional[BaseException] = None) -> None: + self._queue.put(b"") + + if self._worker: + self._worker.join() + self._worker = None # break the loop + + # Check if the worker thread raised any exception before terminating. + if self._worker_error: + raise self._worker_error + + super().finish(exc) + + +class FileWriter(Writer): + """ + A `Writer` to write copy data to a file-like object. + + :param file: the file where to write copy data. It must be open for writing + in binary mode. 
+ """ + + def __init__(self, file: IO[bytes]): + self.file = file + + def write(self, data: Buffer) -> None: + self.file.write(data) + + +class AsyncCopy(BaseCopy["AsyncConnection[Any]"]): + """Manage an asynchronous :sql:`COPY` operation.""" + + __module__ = "psycopg" + + writer: "AsyncWriter" + + def __init__( + self, + cursor: "AsyncCursor[Any]", + *, + binary: Optional[bool] = None, + writer: Optional["AsyncWriter"] = None, + ): + super().__init__(cursor, binary=binary) + + if not writer: + writer = AsyncLibpqWriter(cursor) + + self.writer = writer + self._write = writer.write + + async def __aenter__(self: BaseCopy._Self) -> BaseCopy._Self: + self._enter() + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + await self.finish(exc_val) + + async def __aiter__(self) -> AsyncIterator[Buffer]: + while True: + data = await self.read() + if not data: + break + yield data + + async def read(self) -> Buffer: + return await self.connection.wait(self._read_gen()) + + async def rows(self) -> AsyncIterator[Tuple[Any, ...]]: + while True: + record = await self.read_row() + if record is None: + break + yield record + + async def read_row(self) -> Optional[Tuple[Any, ...]]: + return await self.connection.wait(self._read_row_gen()) + + async def write(self, buffer: Union[Buffer, str]) -> None: + data = self.formatter.write(buffer) + if data: + await self._write(data) + + async def write_row(self, row: Sequence[Any]) -> None: + data = self.formatter.write_row(row) + if data: + await self._write(data) + + async def finish(self, exc: Optional[BaseException]) -> None: + if self._direction == COPY_IN: + data = self.formatter.end() + if data: + await self._write(data) + await self.writer.finish(exc) + self._finished = True + else: + await self.connection.wait(self._end_copy_out_gen(exc)) + + +class AsyncWriter(ABC): + """ + A class to write copy data somewhere 
(for async connections). + """ + + @abstractmethod + async def write(self, data: Buffer) -> None: + ... + + async def finish(self, exc: Optional[BaseException] = None) -> None: + pass + + +class AsyncLibpqWriter(AsyncWriter): + """ + An `AsyncWriter` to write copy data to a Postgres database. + """ + + def __init__(self, cursor: "AsyncCursor[Any]"): + self.cursor = cursor + self.connection = cursor.connection + self._pgconn = self.connection.pgconn + + async def write(self, data: Buffer) -> None: + if len(data) <= MAX_BUFFER_SIZE: + # Most used path: we don't need to split the buffer in smaller + # bits, so don't make a copy. + await self.connection.wait(copy_to(self._pgconn, data)) + else: + # Copy a buffer too large in chunks to avoid causing a memory + # error in the libpq, which may cause an infinite loop (#255). + for i in range(0, len(data), MAX_BUFFER_SIZE): + await self.connection.wait( + copy_to(self._pgconn, data[i : i + MAX_BUFFER_SIZE]) + ) + + async def finish(self, exc: Optional[BaseException] = None) -> None: + bmsg: Optional[bytes] + if exc: + msg = f"error from Python: {type(exc).__qualname__} - {exc}" + bmsg = msg.encode(pgconn_encoding(self._pgconn), "replace") + else: + bmsg = None + + res = await self.connection.wait(copy_end(self._pgconn, bmsg)) + self.cursor._results = [res] + + +class AsyncQueuedLibpqWriter(AsyncLibpqWriter): + """ + An `AsyncWriter` using a buffer to queue data to write. + + `write()` returns immediately, so that the main thread can be CPU-bound + formatting messages, while a worker thread can be IO-bound waiting to write + on the connection. + """ + + def __init__(self, cursor: "AsyncCursor[Any]"): + super().__init__(cursor) + + self._queue: asyncio.Queue[Buffer] = asyncio.Queue(maxsize=QUEUE_SIZE) + self._worker: Optional[asyncio.Future[None]] = None + + async def worker(self) -> None: + """Push data to the server when available from the copy queue. + + Terminate reading when the queue receives a false-y value. 
+ + The function is designed to be run in a separate task. + """ + while True: + data = await self._queue.get() + if not data: + break + await self.connection.wait(copy_to(self._pgconn, data)) + + async def write(self, data: Buffer) -> None: + if not self._worker: + self._worker = create_task(self.worker()) + + if len(data) <= MAX_BUFFER_SIZE: + # Most used path: we don't need to split the buffer in smaller + # bits, so don't make a copy. + await self._queue.put(data) + else: + # Copy a buffer too large in chunks to avoid causing a memory + # error in the libpq, which may cause an infinite loop (#255). + for i in range(0, len(data), MAX_BUFFER_SIZE): + await self._queue.put(data[i : i + MAX_BUFFER_SIZE]) + + async def finish(self, exc: Optional[BaseException] = None) -> None: + await self._queue.put(b"") + + if self._worker: + await asyncio.gather(self._worker) + self._worker = None # break reference loops if any + + await super().finish(exc) + + +class Formatter(ABC): + """ + A class which understand a copy format (text, binary). + """ + + format: pq.Format + + def __init__(self, transformer: Transformer): + self.transformer = transformer + self._write_buffer = bytearray() + self._row_mode = False # true if the user is using write_row() + + @abstractmethod + def parse_row(self, data: Buffer) -> Optional[Tuple[Any, ...]]: + ... + + @abstractmethod + def write(self, buffer: Union[Buffer, str]) -> Buffer: + ... + + @abstractmethod + def write_row(self, row: Sequence[Any]) -> Buffer: + ... + + @abstractmethod + def end(self) -> Buffer: + ... 
+ + +class TextFormatter(Formatter): + format = TEXT + + def __init__(self, transformer: Transformer, encoding: str = "utf-8"): + super().__init__(transformer) + self._encoding = encoding + + def parse_row(self, data: Buffer) -> Optional[Tuple[Any, ...]]: + if data: + return parse_row_text(data, self.transformer) + else: + return None + + def write(self, buffer: Union[Buffer, str]) -> Buffer: + data = self._ensure_bytes(buffer) + self._signature_sent = True + return data + + def write_row(self, row: Sequence[Any]) -> Buffer: + # Note down that we are writing in row mode: it means we will have + # to take care of the end-of-copy marker too + self._row_mode = True + + format_row_text(row, self.transformer, self._write_buffer) + if len(self._write_buffer) > BUFFER_SIZE: + buffer, self._write_buffer = self._write_buffer, bytearray() + return buffer + else: + return b"" + + def end(self) -> Buffer: + buffer, self._write_buffer = self._write_buffer, bytearray() + return buffer + + def _ensure_bytes(self, data: Union[Buffer, str]) -> Buffer: + if isinstance(data, str): + return data.encode(self._encoding) + else: + # Assume, for simplicity, that the user is not passing stupid + # things to the write function. If that's the case, things + # will fail downstream. 
+ return data + + +class BinaryFormatter(Formatter): + format = BINARY + + def __init__(self, transformer: Transformer): + super().__init__(transformer) + self._signature_sent = False + + def parse_row(self, data: Buffer) -> Optional[Tuple[Any, ...]]: + if not self._signature_sent: + if data[: len(_binary_signature)] != _binary_signature: + raise e.DataError( + "binary copy doesn't start with the expected signature" + ) + self._signature_sent = True + data = data[len(_binary_signature) :] + + elif data == _binary_trailer: + return None + + return parse_row_binary(data, self.transformer) + + def write(self, buffer: Union[Buffer, str]) -> Buffer: + data = self._ensure_bytes(buffer) + self._signature_sent = True + return data + + def write_row(self, row: Sequence[Any]) -> Buffer: + # Note down that we are writing in row mode: it means we will have + # to take care of the end-of-copy marker too + self._row_mode = True + + if not self._signature_sent: + self._write_buffer += _binary_signature + self._signature_sent = True + + format_row_binary(row, self.transformer, self._write_buffer) + if len(self._write_buffer) > BUFFER_SIZE: + buffer, self._write_buffer = self._write_buffer, bytearray() + return buffer + else: + return b"" + + def end(self) -> Buffer: + # If we have sent no data we need to send the signature + # and the trailer + if not self._signature_sent: + self._write_buffer += _binary_signature + self._write_buffer += _binary_trailer + + elif self._row_mode: + # if we have sent data already, we have sent the signature + # too (either with the first row, or we assume that in + # block mode the signature is included). + # Write the trailer only if we are sending rows (with the + # assumption that who is copying binary data is sending the + # whole format). 
+ self._write_buffer += _binary_trailer + + buffer, self._write_buffer = self._write_buffer, bytearray() + return buffer + + def _ensure_bytes(self, data: Union[Buffer, str]) -> Buffer: + if isinstance(data, str): + raise TypeError("cannot copy str data in binary mode: use bytes instead") + else: + # Assume, for simplicity, that the user is not passing stupid + # things to the write function. If that's the case, things + # will fail downstream. + return data + + +def _format_row_text( + row: Sequence[Any], tx: Transformer, out: Optional[bytearray] = None +) -> bytearray: + """Convert a row of objects to the data to send for copy.""" + if out is None: + out = bytearray() + + if not row: + out += b"\n" + return out + + for item in row: + if item is not None: + dumper = tx.get_dumper(item, PY_TEXT) + b = dumper.dump(item) + out += _dump_re.sub(_dump_sub, b) + else: + out += rb"\N" + out += b"\t" + + out[-1:] = b"\n" + return out + + +def _format_row_binary( + row: Sequence[Any], tx: Transformer, out: Optional[bytearray] = None +) -> bytearray: + """Convert a row of objects to the data to send for binary copy.""" + if out is None: + out = bytearray() + + out += _pack_int2(len(row)) + adapted = tx.dump_sequence(row, [PY_BINARY] * len(row)) + for b in adapted: + if b is not None: + out += _pack_int4(len(b)) + out += b + else: + out += _binary_null + + return out + + +def _parse_row_text(data: Buffer, tx: Transformer) -> Tuple[Any, ...]: + if not isinstance(data, bytes): + data = bytes(data) + fields = data.split(b"\t") + fields[-1] = fields[-1][:-1] # drop \n + row = [None if f == b"\\N" else _load_re.sub(_load_sub, f) for f in fields] + return tx.load_sequence(row) + + +def _parse_row_binary(data: Buffer, tx: Transformer) -> Tuple[Any, ...]: + row: List[Optional[Buffer]] = [] + nfields = _unpack_int2(data, 0)[0] + pos = 2 + for i in range(nfields): + length = _unpack_int4(data, pos)[0] + pos += 4 + if length >= 0: + row.append(data[pos : pos + length]) + pos += length + 
else: + row.append(None) + + return tx.load_sequence(row) + + +_pack_int2 = struct.Struct("!h").pack +_pack_int4 = struct.Struct("!i").pack +_unpack_int2 = struct.Struct("!h").unpack_from +_unpack_int4 = struct.Struct("!i").unpack_from + +_binary_signature = ( + b"PGCOPY\n\xff\r\n\0" # Signature + b"\x00\x00\x00\x00" # flags + b"\x00\x00\x00\x00" # extra length +) +_binary_trailer = b"\xff\xff" +_binary_null = b"\xff\xff\xff\xff" + +_dump_re = re.compile(b"[\b\t\n\v\f\r\\\\]") +_dump_repl = { + b"\b": b"\\b", + b"\t": b"\\t", + b"\n": b"\\n", + b"\v": b"\\v", + b"\f": b"\\f", + b"\r": b"\\r", + b"\\": b"\\\\", +} + + +def _dump_sub(m: Match[bytes], __map: Dict[bytes, bytes] = _dump_repl) -> bytes: + return __map[m.group(0)] + + +_load_re = re.compile(b"\\\\[btnvfr\\\\]") +_load_repl = {v: k for k, v in _dump_repl.items()} + + +def _load_sub(m: Match[bytes], __map: Dict[bytes, bytes] = _load_repl) -> bytes: + return __map[m.group(0)] + + +# Override functions with fast versions if available +if _psycopg: + format_row_text = _psycopg.format_row_text + format_row_binary = _psycopg.format_row_binary + parse_row_text = _psycopg.parse_row_text + parse_row_binary = _psycopg.parse_row_binary + +else: + format_row_text = _format_row_text + format_row_binary = _format_row_binary + parse_row_text = _parse_row_text + parse_row_binary = _parse_row_binary diff --git a/lib/python3.11/site-packages/psycopg/crdb/__init__.py b/lib/python3.11/site-packages/psycopg/crdb/__init__.py new file mode 100644 index 0000000..323903a --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/crdb/__init__.py @@ -0,0 +1,19 @@ +""" +CockroachDB support package. +""" + +# Copyright (C) 2022 The Psycopg Team + +from . 
import _types +from .connection import CrdbConnection, AsyncCrdbConnection, CrdbConnectionInfo + +adapters = _types.adapters # exposed by the package +connect = CrdbConnection.connect + +_types.register_crdb_adapters(adapters) + +__all__ = [ + "AsyncCrdbConnection", + "CrdbConnection", + "CrdbConnectionInfo", +] diff --git a/lib/python3.11/site-packages/psycopg/crdb/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/crdb/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..9e1a798 Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/crdb/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/crdb/__pycache__/_types.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/crdb/__pycache__/_types.cpython-311.pyc new file mode 100644 index 0000000..9851d4c Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/crdb/__pycache__/_types.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/crdb/__pycache__/connection.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/crdb/__pycache__/connection.cpython-311.pyc new file mode 100644 index 0000000..e0bc42a Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/crdb/__pycache__/connection.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/crdb/_types.py b/lib/python3.11/site-packages/psycopg/crdb/_types.py new file mode 100644 index 0000000..5311e05 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/crdb/_types.py @@ -0,0 +1,163 @@ +""" +Types configuration specific for CockroachDB. 
+""" + +# Copyright (C) 2022 The Psycopg Team + +from enum import Enum +from .._typeinfo import TypeInfo, TypesRegistry + +from ..abc import AdaptContext, NoneType +from ..postgres import TEXT_OID +from .._adapters_map import AdaptersMap +from ..types.enum import EnumDumper, EnumBinaryDumper +from ..types.none import NoneDumper + +types = TypesRegistry() + +# Global adapter maps with PostgreSQL types configuration +adapters = AdaptersMap(types=types) + + +class CrdbEnumDumper(EnumDumper): + oid = TEXT_OID + + +class CrdbEnumBinaryDumper(EnumBinaryDumper): + oid = TEXT_OID + + +class CrdbNoneDumper(NoneDumper): + oid = TEXT_OID + + +def register_postgres_adapters(context: AdaptContext) -> None: + # Same adapters used by PostgreSQL, or a good starting point for customization + + from ..types import array, bool, composite, datetime + from ..types import numeric, string, uuid + + array.register_default_adapters(context) + bool.register_default_adapters(context) + composite.register_default_adapters(context) + datetime.register_default_adapters(context) + numeric.register_default_adapters(context) + string.register_default_adapters(context) + uuid.register_default_adapters(context) + + +def register_crdb_adapters(context: AdaptContext) -> None: + from .. import dbapi20 + from ..types import array + + register_postgres_adapters(context) + + # String must come after enum to map text oid -> string dumper + register_crdb_enum_adapters(context) + register_crdb_string_adapters(context) + register_crdb_json_adapters(context) + register_crdb_net_adapters(context) + register_crdb_none_adapters(context) + + dbapi20.register_dbapi20_adapters(adapters) + + array.register_all_arrays(adapters) + + +def register_crdb_string_adapters(context: AdaptContext) -> None: + from ..types import string + + # Dump strings with text oid instead of unknown. + # Unlike PostgreSQL, CRDB seems able to cast text to most types. 
+ context.adapters.register_dumper(str, string.StrDumper) + context.adapters.register_dumper(str, string.StrBinaryDumper) + + +def register_crdb_enum_adapters(context: AdaptContext) -> None: + context.adapters.register_dumper(Enum, CrdbEnumBinaryDumper) + context.adapters.register_dumper(Enum, CrdbEnumDumper) + + +def register_crdb_json_adapters(context: AdaptContext) -> None: + from ..types import json + + adapters = context.adapters + + # CRDB doesn't have json/jsonb: both names map to the jsonb oid + adapters.register_dumper(json.Json, json.JsonbBinaryDumper) + adapters.register_dumper(json.Json, json.JsonbDumper) + + adapters.register_dumper(json.Jsonb, json.JsonbBinaryDumper) + adapters.register_dumper(json.Jsonb, json.JsonbDumper) + + adapters.register_loader("json", json.JsonLoader) + adapters.register_loader("jsonb", json.JsonbLoader) + adapters.register_loader("json", json.JsonBinaryLoader) + adapters.register_loader("jsonb", json.JsonbBinaryLoader) + + +def register_crdb_net_adapters(context: AdaptContext) -> None: + from ..types import net + + adapters = context.adapters + + adapters.register_dumper("ipaddress.IPv4Address", net.InterfaceDumper) + adapters.register_dumper("ipaddress.IPv6Address", net.InterfaceDumper) + adapters.register_dumper("ipaddress.IPv4Interface", net.InterfaceDumper) + adapters.register_dumper("ipaddress.IPv6Interface", net.InterfaceDumper) + adapters.register_dumper("ipaddress.IPv4Address", net.AddressBinaryDumper) + adapters.register_dumper("ipaddress.IPv6Address", net.AddressBinaryDumper) + adapters.register_dumper("ipaddress.IPv4Interface", net.InterfaceBinaryDumper) + adapters.register_dumper("ipaddress.IPv6Interface", net.InterfaceBinaryDumper) + adapters.register_dumper(None, net.InetBinaryDumper) + adapters.register_loader("inet", net.InetLoader) + adapters.register_loader("inet", net.InetBinaryLoader) + + +def register_crdb_none_adapters(context: AdaptContext) -> None: + context.adapters.register_dumper(NoneType, 
CrdbNoneDumper) + + +for t in [ + TypeInfo("json", 3802, 3807, regtype="jsonb"), # Alias json -> jsonb. + TypeInfo("int8", 20, 1016, regtype="integer"), # Alias integer -> int8 + TypeInfo('"char"', 18, 1002), # special case, not generated + # autogenerated: start + # Generated from CockroachDB 22.1.0 + TypeInfo("bit", 1560, 1561), + TypeInfo("bool", 16, 1000, regtype="boolean"), + TypeInfo("bpchar", 1042, 1014, regtype="character"), + TypeInfo("bytea", 17, 1001), + TypeInfo("date", 1082, 1182), + TypeInfo("float4", 700, 1021, regtype="real"), + TypeInfo("float8", 701, 1022, regtype="double precision"), + TypeInfo("inet", 869, 1041), + TypeInfo("int2", 21, 1005, regtype="smallint"), + TypeInfo("int2vector", 22, 1006), + TypeInfo("int4", 23, 1007), + TypeInfo("int8", 20, 1016, regtype="bigint"), + TypeInfo("interval", 1186, 1187), + TypeInfo("jsonb", 3802, 3807), + TypeInfo("name", 19, 1003), + TypeInfo("numeric", 1700, 1231), + TypeInfo("oid", 26, 1028), + TypeInfo("oidvector", 30, 1013), + TypeInfo("record", 2249, 2287), + TypeInfo("regclass", 2205, 2210), + TypeInfo("regnamespace", 4089, 4090), + TypeInfo("regproc", 24, 1008), + TypeInfo("regprocedure", 2202, 2207), + TypeInfo("regrole", 4096, 4097), + TypeInfo("regtype", 2206, 2211), + TypeInfo("text", 25, 1009), + TypeInfo("time", 1083, 1183, regtype="time without time zone"), + TypeInfo("timestamp", 1114, 1115, regtype="timestamp without time zone"), + TypeInfo("timestamptz", 1184, 1185, regtype="timestamp with time zone"), + TypeInfo("timetz", 1266, 1270, regtype="time with time zone"), + TypeInfo("unknown", 705, 0), + TypeInfo("uuid", 2950, 2951), + TypeInfo("varbit", 1562, 1563, regtype="bit varying"), + TypeInfo("varchar", 1043, 1015, regtype="character varying"), + # autogenerated: end +]: + types.add(t) diff --git a/lib/python3.11/site-packages/psycopg/crdb/connection.py b/lib/python3.11/site-packages/psycopg/crdb/connection.py new file mode 100644 index 0000000..451474b --- /dev/null +++ 
b/lib/python3.11/site-packages/psycopg/crdb/connection.py @@ -0,0 +1,185 @@ +""" +CockroachDB-specific connections. +""" + +# Copyright (C) 2022 The Psycopg Team + +import re +from typing import Any, Optional, Type, Union, overload, TYPE_CHECKING + +from .. import errors as e +from ..abc import AdaptContext +from ..rows import Row, RowFactory, AsyncRowFactory, TupleRow +from ..conninfo import ConnectionInfo +from ..connection import Connection +from .._adapters_map import AdaptersMap +from ..connection_async import AsyncConnection +from ._types import adapters + +if TYPE_CHECKING: + from ..pq.abc import PGconn + from ..cursor import Cursor + from ..cursor_async import AsyncCursor + + +class _CrdbConnectionMixin: + _adapters: Optional[AdaptersMap] + pgconn: "PGconn" + + @classmethod + def is_crdb( + cls, conn: Union[Connection[Any], AsyncConnection[Any], "PGconn"] + ) -> bool: + """ + Return `!True` if the server connected to `!conn` is CockroachDB. + """ + if isinstance(conn, (Connection, AsyncConnection)): + conn = conn.pgconn + + return bool(conn.parameter_status(b"crdb_version")) + + @property + def adapters(self) -> AdaptersMap: + if not self._adapters: + # By default, use CockroachDB adapters map + self._adapters = AdaptersMap(adapters) + + return self._adapters + + @property + def info(self) -> "CrdbConnectionInfo": + return CrdbConnectionInfo(self.pgconn) + + def _check_tpc(self) -> None: + if self.is_crdb(self.pgconn): + raise e.NotSupportedError("CockroachDB doesn't support prepared statements") + + +class CrdbConnection(_CrdbConnectionMixin, Connection[Row]): + """ + Wrapper for a connection to a CockroachDB database. + """ + + __module__ = "psycopg.crdb" + + # TODO: this method shouldn't require re-definition if the base class + # implements a generic self. 
+ # https://github.com/psycopg/psycopg/issues/308 + @overload + @classmethod + def connect( + cls, + conninfo: str = "", + *, + autocommit: bool = False, + row_factory: RowFactory[Row], + prepare_threshold: Optional[int] = 5, + cursor_factory: "Optional[Type[Cursor[Row]]]" = None, + context: Optional[AdaptContext] = None, + **kwargs: Union[None, int, str], + ) -> "CrdbConnection[Row]": + ... + + @overload + @classmethod + def connect( + cls, + conninfo: str = "", + *, + autocommit: bool = False, + prepare_threshold: Optional[int] = 5, + cursor_factory: "Optional[Type[Cursor[Any]]]" = None, + context: Optional[AdaptContext] = None, + **kwargs: Union[None, int, str], + ) -> "CrdbConnection[TupleRow]": + ... + + @classmethod + def connect(cls, conninfo: str = "", **kwargs: Any) -> "CrdbConnection[Any]": + """ + Connect to a database server and return a new `CrdbConnection` instance. + """ + return super().connect(conninfo, **kwargs) # type: ignore[return-value] + + +class AsyncCrdbConnection(_CrdbConnectionMixin, AsyncConnection[Row]): + """ + Wrapper for an async connection to a CockroachDB database. + """ + + __module__ = "psycopg.crdb" + + # TODO: this method shouldn't require re-definition if the base class + # implements a generic self. + # https://github.com/psycopg/psycopg/issues/308 + @overload + @classmethod + async def connect( + cls, + conninfo: str = "", + *, + autocommit: bool = False, + prepare_threshold: Optional[int] = 5, + row_factory: AsyncRowFactory[Row], + cursor_factory: "Optional[Type[AsyncCursor[Row]]]" = None, + context: Optional[AdaptContext] = None, + **kwargs: Union[None, int, str], + ) -> "AsyncCrdbConnection[Row]": + ... 
+ + @overload + @classmethod + async def connect( + cls, + conninfo: str = "", + *, + autocommit: bool = False, + prepare_threshold: Optional[int] = 5, + cursor_factory: "Optional[Type[AsyncCursor[Any]]]" = None, + context: Optional[AdaptContext] = None, + **kwargs: Union[None, int, str], + ) -> "AsyncCrdbConnection[TupleRow]": + ... + + @classmethod + async def connect( + cls, conninfo: str = "", **kwargs: Any + ) -> "AsyncCrdbConnection[Any]": + return await super().connect(conninfo, **kwargs) # type: ignore [no-any-return] + + +class CrdbConnectionInfo(ConnectionInfo): + """ + `~psycopg.ConnectionInfo` subclass to get info about a CockroachDB database. + """ + + __module__ = "psycopg.crdb" + + @property + def vendor(self) -> str: + return "CockroachDB" + + @property + def server_version(self) -> int: + """ + Return the CockroachDB server version connected. + + Return a number in the PostgreSQL format (e.g. 21.2.10 -> 210210). + """ + sver = self.parameter_status("crdb_version") + if not sver: + raise e.InternalError("'crdb_version' parameter status not set") + + ver = self.parse_crdb_version(sver) + if ver is None: + raise e.InterfaceError(f"couldn't parse CockroachDB version from: {sver!r}") + + return ver + + @classmethod + def parse_crdb_version(self, sver: str) -> Optional[int]: + m = re.search(r"\bv(\d+)\.(\d+)\.(\d+)", sver) + if not m: + return None + + return int(m.group(1)) * 10000 + int(m.group(2)) * 100 + int(m.group(3)) diff --git a/lib/python3.11/site-packages/psycopg/cursor.py b/lib/python3.11/site-packages/psycopg/cursor.py new file mode 100644 index 0000000..148ea10 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/cursor.py @@ -0,0 +1,915 @@ +""" +psycopg cursor objects +""" + +# Copyright (C) 2020 The Psycopg Team + +from functools import partial +from types import TracebackType +from typing import Any, Generic, Iterable, Iterator, List +from typing import Optional, NoReturn, Sequence, Tuple, Type, TypeVar +from typing import overload, 
TYPE_CHECKING +from contextlib import contextmanager + +from . import pq +from . import adapt +from . import errors as e +from .abc import ConnectionType, Query, Params, PQGen +from .copy import Copy, Writer as CopyWriter +from .rows import Row, RowMaker, RowFactory +from ._column import Column +from ._queries import PostgresQuery, PostgresClientQuery +from ._pipeline import Pipeline +from ._encodings import pgconn_encoding +from ._preparing import Prepare +from .generators import execute, fetch, send + +if TYPE_CHECKING: + from .abc import Transformer + from .pq.abc import PGconn, PGresult + from .connection import Connection + +TEXT = pq.Format.TEXT +BINARY = pq.Format.BINARY + +EMPTY_QUERY = pq.ExecStatus.EMPTY_QUERY +COMMAND_OK = pq.ExecStatus.COMMAND_OK +TUPLES_OK = pq.ExecStatus.TUPLES_OK +COPY_OUT = pq.ExecStatus.COPY_OUT +COPY_IN = pq.ExecStatus.COPY_IN +COPY_BOTH = pq.ExecStatus.COPY_BOTH +FATAL_ERROR = pq.ExecStatus.FATAL_ERROR +SINGLE_TUPLE = pq.ExecStatus.SINGLE_TUPLE +PIPELINE_ABORTED = pq.ExecStatus.PIPELINE_ABORTED + +ACTIVE = pq.TransactionStatus.ACTIVE + + +class BaseCursor(Generic[ConnectionType, Row]): + __slots__ = """ + _conn format _adapters arraysize _closed _results pgresult _pos + _iresult _rowcount _query _tx _last_query _row_factory _make_row + _pgconn _execmany_returning + __weakref__ + """.split() + + ExecStatus = pq.ExecStatus + + _tx: "Transformer" + _make_row: RowMaker[Row] + _pgconn: "PGconn" + + def __init__(self, connection: ConnectionType): + self._conn = connection + self.format = TEXT + self._pgconn = connection.pgconn + self._adapters = adapt.AdaptersMap(connection.adapters) + self.arraysize = 1 + self._closed = False + self._last_query: Optional[Query] = None + self._reset() + + def _reset(self, reset_query: bool = True) -> None: + self._results: List["PGresult"] = [] + self.pgresult: Optional["PGresult"] = None + self._pos = 0 + self._iresult = 0 + self._rowcount = -1 + self._query: Optional[PostgresQuery] + # None if 
executemany() not executing, True/False according to returning state + self._execmany_returning: Optional[bool] = None + if reset_query: + self._query = None + + def __repr__(self) -> str: + cls = f"{self.__class__.__module__}.{self.__class__.__qualname__}" + info = pq.misc.connection_summary(self._pgconn) + if self._closed: + status = "closed" + elif self.pgresult: + status = pq.ExecStatus(self.pgresult.status).name + else: + status = "no result" + return f"<{cls} [{status}] {info} at 0x{id(self):x}>" + + @property + def connection(self) -> ConnectionType: + """The connection this cursor is using.""" + return self._conn + + @property + def adapters(self) -> adapt.AdaptersMap: + return self._adapters + + @property + def closed(self) -> bool: + """`True` if the cursor is closed.""" + return self._closed + + @property + def description(self) -> Optional[List[Column]]: + """ + A list of `Column` objects describing the current resultset. + + `!None` if the current resultset didn't return tuples. + """ + res = self.pgresult + + # We return columns if we have nfields, but also if we don't but + # the query said we got tuples (mostly to handle the super useful + # query "SELECT ;" + if res and ( + res.nfields or res.status == TUPLES_OK or res.status == SINGLE_TUPLE + ): + return [Column(self, i) for i in range(res.nfields)] + else: + return None + + @property + def rowcount(self) -> int: + """Number of records affected by the precedent operation.""" + return self._rowcount + + @property + def rownumber(self) -> Optional[int]: + """Index of the next row to fetch in the current result. + + `!None` if there is no result to fetch. 
+ """ + tuples = self.pgresult and self.pgresult.status == TUPLES_OK + return self._pos if tuples else None + + def setinputsizes(self, sizes: Sequence[Any]) -> None: + # no-op + pass + + def setoutputsize(self, size: Any, column: Optional[int] = None) -> None: + # no-op + pass + + def nextset(self) -> Optional[bool]: + """ + Move to the result set of the next query executed through `executemany()` + or to the next result set if `execute()` returned more than one. + + Return `!True` if a new result is available, which will be the one + methods `!fetch*()` will operate on. + """ + if self._iresult < len(self._results) - 1: + self._select_current_result(self._iresult + 1) + return True + else: + return None + + @property + def statusmessage(self) -> Optional[str]: + """ + The command status tag from the last SQL command executed. + + `!None` if the cursor doesn't have a result available. + """ + msg = self.pgresult.command_status if self.pgresult else None + return msg.decode() if msg else None + + def _make_row_maker(self) -> RowMaker[Row]: + raise NotImplementedError + + # + # Generators for the high level operations on the cursor + # + # Like for sync/async connections, these are implemented as generators + # so that different concurrency strategies (threads,asyncio) can use their + # own way of waiting (or better, `connection.wait()`). 
+ # + + def _execute_gen( + self, + query: Query, + params: Optional[Params] = None, + *, + prepare: Optional[bool] = None, + binary: Optional[bool] = None, + ) -> PQGen[None]: + """Generator implementing `Cursor.execute()`.""" + yield from self._start_query(query) + pgq = self._convert_query(query, params) + results = yield from self._maybe_prepare_gen( + pgq, prepare=prepare, binary=binary + ) + if self._conn._pipeline: + yield from self._conn._pipeline._communicate_gen() + else: + assert results is not None + self._check_results(results) + self._results = results + self._select_current_result(0) + + self._last_query = query + + for cmd in self._conn._prepared.get_maintenance_commands(): + yield from self._conn._exec_command(cmd) + + def _executemany_gen_pipeline( + self, query: Query, params_seq: Iterable[Params], returning: bool + ) -> PQGen[None]: + """ + Generator implementing `Cursor.executemany()` with pipelines available. + """ + pipeline = self._conn._pipeline + assert pipeline + + yield from self._start_query(query) + if not returning: + self._rowcount = 0 + + assert self._execmany_returning is None + self._execmany_returning = returning + + first = True + for params in params_seq: + if first: + pgq = self._convert_query(query, params) + self._query = pgq + first = False + else: + pgq.dump(params) + + yield from self._maybe_prepare_gen(pgq, prepare=True) + yield from pipeline._communicate_gen() + + self._last_query = query + + if returning: + yield from pipeline._fetch_gen(flush=True) + + for cmd in self._conn._prepared.get_maintenance_commands(): + yield from self._conn._exec_command(cmd) + + def _executemany_gen_no_pipeline( + self, query: Query, params_seq: Iterable[Params], returning: bool + ) -> PQGen[None]: + """ + Generator implementing `Cursor.executemany()` with pipelines not available. 
+ """ + yield from self._start_query(query) + if not returning: + self._rowcount = 0 + first = True + for params in params_seq: + if first: + pgq = self._convert_query(query, params) + self._query = pgq + first = False + else: + pgq.dump(params) + + results = yield from self._maybe_prepare_gen(pgq, prepare=True) + assert results is not None + self._check_results(results) + if returning: + self._results.extend(results) + else: + # In non-returning case, set rowcount to the cumulated number + # of rows of executed queries. + for res in results: + self._rowcount += res.command_tuples or 0 + + if self._results: + self._select_current_result(0) + + self._last_query = query + + for cmd in self._conn._prepared.get_maintenance_commands(): + yield from self._conn._exec_command(cmd) + + def _maybe_prepare_gen( + self, + pgq: PostgresQuery, + *, + prepare: Optional[bool] = None, + binary: Optional[bool] = None, + ) -> PQGen[Optional[List["PGresult"]]]: + # Check if the query is prepared or needs preparing + prep, name = self._get_prepared(pgq, prepare) + if prep is Prepare.NO: + # The query must be executed without preparing + self._execute_send(pgq, binary=binary) + else: + # If the query is not already prepared, prepare it. + if prep is Prepare.SHOULD: + self._send_prepare(name, pgq) + if not self._conn._pipeline: + (result,) = yield from execute(self._pgconn) + if result.status == FATAL_ERROR: + raise e.error_from_result(result, encoding=self._encoding) + # Then execute it. + self._send_query_prepared(name, pgq, binary=binary) + + # Update the prepare state of the query. + # If an operation requires to flush our prepared statements cache, + # it will be added to the maintenance commands to execute later. 
+ key = self._conn._prepared.maybe_add_to_cache(pgq, prep, name) + + if self._conn._pipeline: + queued = None + if key is not None: + queued = (key, prep, name) + self._conn._pipeline.result_queue.append((self, queued)) + return None + + # run the query + results = yield from execute(self._pgconn) + + if key is not None: + self._conn._prepared.validate(key, prep, name, results) + + return results + + def _get_prepared( + self, pgq: PostgresQuery, prepare: Optional[bool] = None + ) -> Tuple[Prepare, bytes]: + return self._conn._prepared.get(pgq, prepare) + + def _stream_send_gen( + self, + query: Query, + params: Optional[Params] = None, + *, + binary: Optional[bool] = None, + ) -> PQGen[None]: + """Generator to send the query for `Cursor.stream()`.""" + yield from self._start_query(query) + pgq = self._convert_query(query, params) + self._execute_send(pgq, binary=binary, force_extended=True) + self._pgconn.set_single_row_mode() + self._last_query = query + yield from send(self._pgconn) + + def _stream_fetchone_gen(self, first: bool) -> PQGen[Optional["PGresult"]]: + res = yield from fetch(self._pgconn) + if res is None: + return None + + status = res.status + if status == SINGLE_TUPLE: + self.pgresult = res + self._tx.set_pgresult(res, set_loaders=first) + if first: + self._make_row = self._make_row_maker() + return res + + elif status == TUPLES_OK or status == COMMAND_OK: + # End of single row results + while res: + res = yield from fetch(self._pgconn) + if status != TUPLES_OK: + raise e.ProgrammingError( + "the operation in stream() didn't produce a result" + ) + return None + + else: + # Errors, unexpected values + return self._raise_for_result(res) + + def _start_query(self, query: Optional[Query] = None) -> PQGen[None]: + """Generator to start the processing of a query. + + It is implemented as generator because it may send additional queries, + such as `begin`. 
+ """ + if self.closed: + raise e.InterfaceError("the cursor is closed") + + self._reset() + if not self._last_query or (self._last_query is not query): + self._last_query = None + self._tx = adapt.Transformer(self) + yield from self._conn._start_query() + + def _start_copy_gen( + self, statement: Query, params: Optional[Params] = None + ) -> PQGen[None]: + """Generator implementing sending a command for `Cursor.copy().""" + + # The connection gets in an unrecoverable state if we attempt COPY in + # pipeline mode. Forbid it explicitly. + if self._conn._pipeline: + raise e.NotSupportedError("COPY cannot be used in pipeline mode") + + yield from self._start_query() + + # Merge the params client-side + if params: + pgq = PostgresClientQuery(self._tx) + pgq.convert(statement, params) + statement = pgq.query + + query = self._convert_query(statement) + + self._execute_send(query, binary=False) + results = yield from execute(self._pgconn) + if len(results) != 1: + raise e.ProgrammingError("COPY cannot be mixed with other operations") + + self._check_copy_result(results[0]) + self._results = results + self._select_current_result(0) + + def _execute_send( + self, + query: PostgresQuery, + *, + force_extended: bool = False, + binary: Optional[bool] = None, + ) -> None: + """ + Implement part of execute() before waiting common to sync and async. + + This is not a generator, but a normal non-blocking function. + """ + if binary is None: + fmt = self.format + else: + fmt = BINARY if binary else TEXT + + self._query = query + + if self._conn._pipeline: + # In pipeline mode always use PQsendQueryParams - see #314 + # Multiple statements in the same query are not allowed anyway. 
+ self._conn._pipeline.command_queue.append( + partial( + self._pgconn.send_query_params, + query.query, + query.params, + param_formats=query.formats, + param_types=query.types, + result_format=fmt, + ) + ) + elif force_extended or query.params or fmt == BINARY: + self._pgconn.send_query_params( + query.query, + query.params, + param_formats=query.formats, + param_types=query.types, + result_format=fmt, + ) + else: + # If we can, let's use simple query protocol, + # as it can execute more than one statement in a single query. + self._pgconn.send_query(query.query) + + def _convert_query( + self, query: Query, params: Optional[Params] = None + ) -> PostgresQuery: + pgq = PostgresQuery(self._tx) + pgq.convert(query, params) + return pgq + + def _check_results(self, results: List["PGresult"]) -> None: + """ + Verify that the results of a query are valid. + + Verify that the query returned at least one result and that they all + represent a valid result from the database. + """ + if not results: + raise e.InternalError("got no result from the query") + + for res in results: + status = res.status + if status != TUPLES_OK and status != COMMAND_OK and status != EMPTY_QUERY: + self._raise_for_result(res) + + def _raise_for_result(self, result: "PGresult") -> NoReturn: + """ + Raise an appropriate error message for an unexpected database result + """ + status = result.status + if status == FATAL_ERROR: + raise e.error_from_result(result, encoding=self._encoding) + elif status == PIPELINE_ABORTED: + raise e.PipelineAborted("pipeline aborted") + elif status == COPY_IN or status == COPY_OUT or status == COPY_BOTH: + raise e.ProgrammingError( + "COPY cannot be used with this method; use copy() instead" + ) + else: + raise e.InternalError( + "unexpected result status from query:" f" {pq.ExecStatus(status).name}" + ) + + def _select_current_result( + self, i: int, format: Optional[pq.Format] = None + ) -> None: + """ + Select one of the results in the cursor as the active one. 
+ """ + self._iresult = i + res = self.pgresult = self._results[i] + + # Note: the only reason to override format is to correctly set + # binary loaders on server-side cursors, because send_describe_portal + # only returns a text result. + self._tx.set_pgresult(res, format=format) + + self._pos = 0 + + if res.status == TUPLES_OK: + self._rowcount = self.pgresult.ntuples + + # COPY_OUT has never info about nrows. We need such result for the + # columns in order to return a `description`, but not overwrite the + # cursor rowcount (which was set by the Copy object). + elif res.status != COPY_OUT: + nrows = self.pgresult.command_tuples + self._rowcount = nrows if nrows is not None else -1 + + self._make_row = self._make_row_maker() + + def _set_results_from_pipeline(self, results: List["PGresult"]) -> None: + self._check_results(results) + first_batch = not self._results + + if self._execmany_returning is None: + # Received from execute() + self._results.extend(results) + if first_batch: + self._select_current_result(0) + + else: + # Received from executemany() + if self._execmany_returning: + self._results.extend(results) + if first_batch: + self._select_current_result(0) + else: + # In non-returning case, set rowcount to the cumulated number of + # rows of executed queries. 
+ for res in results: + self._rowcount += res.command_tuples or 0 + + def _send_prepare(self, name: bytes, query: PostgresQuery) -> None: + if self._conn._pipeline: + self._conn._pipeline.command_queue.append( + partial( + self._pgconn.send_prepare, + name, + query.query, + param_types=query.types, + ) + ) + self._conn._pipeline.result_queue.append(None) + else: + self._pgconn.send_prepare(name, query.query, param_types=query.types) + + def _send_query_prepared( + self, name: bytes, pgq: PostgresQuery, *, binary: Optional[bool] = None + ) -> None: + if binary is None: + fmt = self.format + else: + fmt = BINARY if binary else TEXT + + if self._conn._pipeline: + self._conn._pipeline.command_queue.append( + partial( + self._pgconn.send_query_prepared, + name, + pgq.params, + param_formats=pgq.formats, + result_format=fmt, + ) + ) + else: + self._pgconn.send_query_prepared( + name, pgq.params, param_formats=pgq.formats, result_format=fmt + ) + + def _check_result_for_fetch(self) -> None: + if self.closed: + raise e.InterfaceError("the cursor is closed") + res = self.pgresult + if not res: + raise e.ProgrammingError("no result available") + + status = res.status + if status == TUPLES_OK: + return + elif status == FATAL_ERROR: + raise e.error_from_result(res, encoding=self._encoding) + elif status == PIPELINE_ABORTED: + raise e.PipelineAborted("pipeline aborted") + else: + raise e.ProgrammingError("the last operation didn't produce a result") + + def _check_copy_result(self, result: "PGresult") -> None: + """ + Check that the value returned in a copy() operation is a legit COPY. + """ + status = result.status + if status == COPY_IN or status == COPY_OUT: + return + elif status == FATAL_ERROR: + raise e.error_from_result(result, encoding=self._encoding) + else: + raise e.ProgrammingError( + "copy() should be used only with COPY ... TO STDOUT or COPY ..." 
+ f" FROM STDIN statements, got {pq.ExecStatus(status).name}" + ) + + def _scroll(self, value: int, mode: str) -> None: + self._check_result_for_fetch() + assert self.pgresult + if mode == "relative": + newpos = self._pos + value + elif mode == "absolute": + newpos = value + else: + raise ValueError(f"bad mode: {mode}. It should be 'relative' or 'absolute'") + if not 0 <= newpos < self.pgresult.ntuples: + raise IndexError("position out of bound") + self._pos = newpos + + def _close(self) -> None: + """Non-blocking part of closing. Common to sync/async.""" + # Don't reset the query because it may be useful to investigate after + # an error. + self._reset(reset_query=False) + self._closed = True + + @property + def _encoding(self) -> str: + return pgconn_encoding(self._pgconn) + + +class Cursor(BaseCursor["Connection[Any]", Row]): + __module__ = "psycopg" + __slots__ = () + _Self = TypeVar("_Self", bound="Cursor[Any]") + + @overload + def __init__(self: "Cursor[Row]", connection: "Connection[Row]"): + ... + + @overload + def __init__( + self: "Cursor[Row]", + connection: "Connection[Any]", + *, + row_factory: RowFactory[Row], + ): + ... + + def __init__( + self, + connection: "Connection[Any]", + *, + row_factory: Optional[RowFactory[Row]] = None, + ): + super().__init__(connection) + self._row_factory = row_factory or connection.row_factory + + def __enter__(self: _Self) -> _Self: + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + self.close() + + def close(self) -> None: + """ + Close the current cursor and free associated resources. 
+ """ + self._close() + + @property + def row_factory(self) -> RowFactory[Row]: + """Writable attribute to control how result rows are formed.""" + return self._row_factory + + @row_factory.setter + def row_factory(self, row_factory: RowFactory[Row]) -> None: + self._row_factory = row_factory + if self.pgresult: + self._make_row = row_factory(self) + + def _make_row_maker(self) -> RowMaker[Row]: + return self._row_factory(self) + + def execute( + self: _Self, + query: Query, + params: Optional[Params] = None, + *, + prepare: Optional[bool] = None, + binary: Optional[bool] = None, + ) -> _Self: + """ + Execute a query or command to the database. + """ + try: + with self._conn.lock: + self._conn.wait( + self._execute_gen(query, params, prepare=prepare, binary=binary) + ) + except e._NO_TRACEBACK as ex: + raise ex.with_traceback(None) + return self + + def executemany( + self, + query: Query, + params_seq: Iterable[Params], + *, + returning: bool = False, + ) -> None: + """ + Execute the same command with a sequence of input data. + """ + try: + if Pipeline.is_supported(): + # If there is already a pipeline, ride it, in order to avoid + # sending unnecessary Sync. + with self._conn.lock: + p = self._conn._pipeline + if p: + self._conn.wait( + self._executemany_gen_pipeline(query, params_seq, returning) + ) + # Otherwise, make a new one + if not p: + with self._conn.pipeline(), self._conn.lock: + self._conn.wait( + self._executemany_gen_pipeline(query, params_seq, returning) + ) + else: + with self._conn.lock: + self._conn.wait( + self._executemany_gen_no_pipeline(query, params_seq, returning) + ) + except e._NO_TRACEBACK as ex: + raise ex.with_traceback(None) + + def stream( + self, + query: Query, + params: Optional[Params] = None, + *, + binary: Optional[bool] = None, + ) -> Iterator[Row]: + """ + Iterate row-by-row on a result from the database. 
+ """ + if self._pgconn.pipeline_status: + raise e.ProgrammingError("stream() cannot be used in pipeline mode") + + with self._conn.lock: + try: + self._conn.wait(self._stream_send_gen(query, params, binary=binary)) + first = True + while self._conn.wait(self._stream_fetchone_gen(first)): + # We know that, if we got a result, it has a single row. + rec: Row = self._tx.load_row(0, self._make_row) # type: ignore + yield rec + first = False + + except e._NO_TRACEBACK as ex: + raise ex.with_traceback(None) + + finally: + if self._pgconn.transaction_status == ACTIVE: + # Try to cancel the query, then consume the results + # already received. + self._conn.cancel() + try: + while self._conn.wait(self._stream_fetchone_gen(first=False)): + pass + except Exception: + pass + + # Try to get out of ACTIVE state. Just do a single attempt, which + # should work to recover from an error or query cancelled. + try: + self._conn.wait(self._stream_fetchone_gen(first=False)) + except Exception: + pass + + def fetchone(self) -> Optional[Row]: + """ + Return the next record from the current recordset. + + Return `!None` the recordset is finished. + + :rtype: Optional[Row], with Row defined by `row_factory` + """ + self._fetch_pipeline() + self._check_result_for_fetch() + record = self._tx.load_row(self._pos, self._make_row) + if record is not None: + self._pos += 1 + return record + + def fetchmany(self, size: int = 0) -> List[Row]: + """ + Return the next `!size` records from the current recordset. + + `!size` default to `!self.arraysize` if not specified. 
+ + :rtype: Sequence[Row], with Row defined by `row_factory` + """ + self._fetch_pipeline() + self._check_result_for_fetch() + assert self.pgresult + + if not size: + size = self.arraysize + records = self._tx.load_rows( + self._pos, + min(self._pos + size, self.pgresult.ntuples), + self._make_row, + ) + self._pos += len(records) + return records + + def fetchall(self) -> List[Row]: + """ + Return all the remaining records from the current recordset. + + :rtype: Sequence[Row], with Row defined by `row_factory` + """ + self._fetch_pipeline() + self._check_result_for_fetch() + assert self.pgresult + records = self._tx.load_rows(self._pos, self.pgresult.ntuples, self._make_row) + self._pos = self.pgresult.ntuples + return records + + def __iter__(self) -> Iterator[Row]: + self._fetch_pipeline() + self._check_result_for_fetch() + + def load(pos: int) -> Optional[Row]: + return self._tx.load_row(pos, self._make_row) + + while True: + row = load(self._pos) + if row is None: + break + self._pos += 1 + yield row + + def scroll(self, value: int, mode: str = "relative") -> None: + """ + Move the cursor in the result set to a new position according to mode. + + If `!mode` is ``'relative'`` (default), `!value` is taken as offset to + the current position in the result set; if set to ``'absolute'``, + `!value` states an absolute target position. + + Raise `!IndexError` in case a scroll operation would leave the result + set. In this case the position will not change. + """ + self._fetch_pipeline() + self._scroll(value, mode) + + @contextmanager + def copy( + self, + statement: Query, + params: Optional[Params] = None, + *, + writer: Optional[CopyWriter] = None, + ) -> Iterator[Copy]: + """ + Initiate a :sql:`COPY` operation and return an object to manage it. 
+ + :rtype: Copy + """ + try: + with self._conn.lock: + self._conn.wait(self._start_copy_gen(statement, params)) + + with Copy(self, writer=writer) as copy: + yield copy + except e._NO_TRACEBACK as ex: + raise ex.with_traceback(None) + + # If a fresher result has been set on the cursor by the Copy object, + # read its properties (especially rowcount). + self._select_current_result(0) + + def _fetch_pipeline(self) -> None: + if ( + self._execmany_returning is not False + and not self.pgresult + and self._conn._pipeline + ): + with self._conn.lock: + self._conn.wait(self._conn._pipeline._fetch_gen(flush=True)) diff --git a/lib/python3.11/site-packages/psycopg/cursor_async.py b/lib/python3.11/site-packages/psycopg/cursor_async.py new file mode 100644 index 0000000..ab7b073 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/cursor_async.py @@ -0,0 +1,249 @@ +""" +psycopg async cursor objects +""" + +# Copyright (C) 2020 The Psycopg Team + +from types import TracebackType +from typing import Any, AsyncIterator, Iterable, List +from typing import Optional, Type, TypeVar, TYPE_CHECKING, overload +from contextlib import asynccontextmanager + +from . import pq +from . import errors as e +from .abc import Query, Params +from .copy import AsyncCopy, AsyncWriter as AsyncCopyWriter +from .rows import Row, RowMaker, AsyncRowFactory +from .cursor import BaseCursor +from ._pipeline import Pipeline + +if TYPE_CHECKING: + from .connection_async import AsyncConnection + +ACTIVE = pq.TransactionStatus.ACTIVE + + +class AsyncCursor(BaseCursor["AsyncConnection[Any]", Row]): + __module__ = "psycopg" + __slots__ = () + _Self = TypeVar("_Self", bound="AsyncCursor[Any]") + + @overload + def __init__(self: "AsyncCursor[Row]", connection: "AsyncConnection[Row]"): + ... + + @overload + def __init__( + self: "AsyncCursor[Row]", + connection: "AsyncConnection[Any]", + *, + row_factory: AsyncRowFactory[Row], + ): + ... 
+ + def __init__( + self, + connection: "AsyncConnection[Any]", + *, + row_factory: Optional[AsyncRowFactory[Row]] = None, + ): + super().__init__(connection) + self._row_factory = row_factory or connection.row_factory + + async def __aenter__(self: _Self) -> _Self: + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + await self.close() + + async def close(self) -> None: + self._close() + + @property + def row_factory(self) -> AsyncRowFactory[Row]: + return self._row_factory + + @row_factory.setter + def row_factory(self, row_factory: AsyncRowFactory[Row]) -> None: + self._row_factory = row_factory + if self.pgresult: + self._make_row = row_factory(self) + + def _make_row_maker(self) -> RowMaker[Row]: + return self._row_factory(self) + + async def execute( + self: _Self, + query: Query, + params: Optional[Params] = None, + *, + prepare: Optional[bool] = None, + binary: Optional[bool] = None, + ) -> _Self: + try: + async with self._conn.lock: + await self._conn.wait( + self._execute_gen(query, params, prepare=prepare, binary=binary) + ) + except e._NO_TRACEBACK as ex: + raise ex.with_traceback(None) + return self + + async def executemany( + self, + query: Query, + params_seq: Iterable[Params], + *, + returning: bool = False, + ) -> None: + try: + if Pipeline.is_supported(): + # If there is already a pipeline, ride it, in order to avoid + # sending unnecessary Sync. 
+ async with self._conn.lock: + p = self._conn._pipeline + if p: + await self._conn.wait( + self._executemany_gen_pipeline(query, params_seq, returning) + ) + # Otherwise, make a new one + if not p: + async with self._conn.pipeline(), self._conn.lock: + await self._conn.wait( + self._executemany_gen_pipeline(query, params_seq, returning) + ) + else: + await self._conn.wait( + self._executemany_gen_no_pipeline(query, params_seq, returning) + ) + except e._NO_TRACEBACK as ex: + raise ex.with_traceback(None) + + async def stream( + self, + query: Query, + params: Optional[Params] = None, + *, + binary: Optional[bool] = None, + ) -> AsyncIterator[Row]: + if self._pgconn.pipeline_status: + raise e.ProgrammingError("stream() cannot be used in pipeline mode") + + async with self._conn.lock: + try: + await self._conn.wait( + self._stream_send_gen(query, params, binary=binary) + ) + first = True + while await self._conn.wait(self._stream_fetchone_gen(first)): + # We know that, if we got a result, it has a single row. + rec: Row = self._tx.load_row(0, self._make_row) # type: ignore + yield rec + first = False + + except e._NO_TRACEBACK as ex: + raise ex.with_traceback(None) + + finally: + if self._pgconn.transaction_status == ACTIVE: + # Try to cancel the query, then consume the results + # already received. + self._conn.cancel() + try: + while await self._conn.wait( + self._stream_fetchone_gen(first=False) + ): + pass + except Exception: + pass + + # Try to get out of ACTIVE state. Just do a single attempt, which + # should work to recover from an error or query cancelled. 
+ try: + await self._conn.wait(self._stream_fetchone_gen(first=False)) + except Exception: + pass + + async def fetchone(self) -> Optional[Row]: + await self._fetch_pipeline() + self._check_result_for_fetch() + record = self._tx.load_row(self._pos, self._make_row) + if record is not None: + self._pos += 1 + return record + + async def fetchmany(self, size: int = 0) -> List[Row]: + await self._fetch_pipeline() + self._check_result_for_fetch() + assert self.pgresult + + if not size: + size = self.arraysize + records = self._tx.load_rows( + self._pos, + min(self._pos + size, self.pgresult.ntuples), + self._make_row, + ) + self._pos += len(records) + return records + + async def fetchall(self) -> List[Row]: + await self._fetch_pipeline() + self._check_result_for_fetch() + assert self.pgresult + records = self._tx.load_rows(self._pos, self.pgresult.ntuples, self._make_row) + self._pos = self.pgresult.ntuples + return records + + async def __aiter__(self) -> AsyncIterator[Row]: + await self._fetch_pipeline() + self._check_result_for_fetch() + + def load(pos: int) -> Optional[Row]: + return self._tx.load_row(pos, self._make_row) + + while True: + row = load(self._pos) + if row is None: + break + self._pos += 1 + yield row + + async def scroll(self, value: int, mode: str = "relative") -> None: + self._scroll(value, mode) + + @asynccontextmanager + async def copy( + self, + statement: Query, + params: Optional[Params] = None, + *, + writer: Optional[AsyncCopyWriter] = None, + ) -> AsyncIterator[AsyncCopy]: + """ + :rtype: AsyncCopy + """ + try: + async with self._conn.lock: + await self._conn.wait(self._start_copy_gen(statement, params)) + + async with AsyncCopy(self, writer=writer) as copy: + yield copy + except e._NO_TRACEBACK as ex: + raise ex.with_traceback(None) + + self._select_current_result(0) + + async def _fetch_pipeline(self) -> None: + if ( + self._execmany_returning is not False + and not self.pgresult + and self._conn._pipeline + ): + async with 
self._conn.lock: + await self._conn.wait(self._conn._pipeline._fetch_gen(flush=True)) diff --git a/lib/python3.11/site-packages/psycopg/dbapi20.py b/lib/python3.11/site-packages/psycopg/dbapi20.py new file mode 100644 index 0000000..3c3d8b7 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/dbapi20.py @@ -0,0 +1,112 @@ +""" +Compatibility objects with DBAPI 2.0 +""" + +# Copyright (C) 2020 The Psycopg Team + +import time +import datetime as dt +from math import floor +from typing import Any, Sequence, Union + +from . import postgres +from .abc import AdaptContext, Buffer +from .types.string import BytesDumper, BytesBinaryDumper + + +class DBAPITypeObject: + def __init__(self, name: str, type_names: Sequence[str]): + self.name = name + self.values = tuple(postgres.types[n].oid for n in type_names) + + def __repr__(self) -> str: + return f"psycopg.{self.name}" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, int): + return other in self.values + else: + return NotImplemented + + def __ne__(self, other: Any) -> bool: + if isinstance(other, int): + return other not in self.values + else: + return NotImplemented + + +BINARY = DBAPITypeObject("BINARY", ("bytea",)) +DATETIME = DBAPITypeObject( + "DATETIME", "timestamp timestamptz date time timetz interval".split() +) +NUMBER = DBAPITypeObject("NUMBER", "int2 int4 int8 float4 float8 numeric".split()) +ROWID = DBAPITypeObject("ROWID", ("oid",)) +STRING = DBAPITypeObject("STRING", "text varchar bpchar".split()) + + +class Binary: + def __init__(self, obj: Any): + self.obj = obj + + def __repr__(self) -> str: + sobj = repr(self.obj) + if len(sobj) > 40: + sobj = f"{sobj[:35]} ... 
({len(sobj)} byteschars)" + return f"{self.__class__.__name__}({sobj})" + + +class BinaryBinaryDumper(BytesBinaryDumper): + def dump(self, obj: Union[Buffer, Binary]) -> Buffer: + if isinstance(obj, Binary): + return super().dump(obj.obj) + else: + return super().dump(obj) + + +class BinaryTextDumper(BytesDumper): + def dump(self, obj: Union[Buffer, Binary]) -> Buffer: + if isinstance(obj, Binary): + return super().dump(obj.obj) + else: + return super().dump(obj) + + +def Date(year: int, month: int, day: int) -> dt.date: + return dt.date(year, month, day) + + +def DateFromTicks(ticks: float) -> dt.date: + return TimestampFromTicks(ticks).date() + + +def Time(hour: int, minute: int, second: int) -> dt.time: + return dt.time(hour, minute, second) + + +def TimeFromTicks(ticks: float) -> dt.time: + return TimestampFromTicks(ticks).time() + + +def Timestamp( + year: int, month: int, day: int, hour: int, minute: int, second: int +) -> dt.datetime: + return dt.datetime(year, month, day, hour, minute, second) + + +def TimestampFromTicks(ticks: float) -> dt.datetime: + secs = floor(ticks) + frac = ticks - secs + t = time.localtime(ticks) + tzinfo = dt.timezone(dt.timedelta(seconds=t.tm_gmtoff)) + rv = dt.datetime(*t[:6], round(frac * 1_000_000), tzinfo=tzinfo) + return rv + + +def register_dbapi20_adapters(context: AdaptContext) -> None: + adapters = context.adapters + adapters.register_dumper(Binary, BinaryTextDumper) + adapters.register_dumper(Binary, BinaryBinaryDumper) + + # Make them also the default dumpers when dumping by bytea oid + adapters.register_dumper(None, BinaryTextDumper) + adapters.register_dumper(None, BinaryBinaryDumper) diff --git a/lib/python3.11/site-packages/psycopg/errors.py b/lib/python3.11/site-packages/psycopg/errors.py new file mode 100644 index 0000000..d8b7f6f --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/errors.py @@ -0,0 +1,1544 @@ +""" +psycopg exceptions + +DBAPI-defined Exceptions are defined in the following hierarchy:: + + 
Exceptions + |__Warning + |__Error + |__InterfaceError + |__DatabaseError + |__DataError + |__OperationalError + |__IntegrityError + |__InternalError + |__ProgrammingError + |__NotSupportedError +""" + +# Copyright (C) 2020 The Psycopg Team + +from typing import Any, Dict, Optional, Sequence, Tuple, Type, Union +from typing_extensions import TypeAlias +from asyncio import CancelledError + +from .pq.abc import PGconn, PGresult +from .pq._enums import DiagnosticField +from ._compat import TypeGuard + +ErrorInfo: TypeAlias = Union[None, PGresult, Dict[int, Optional[bytes]]] + +_sqlcodes: Dict[str, "Type[Error]"] = {} + + +class Warning(Exception): + """ + Exception raised for important warnings. + + Defined for DBAPI compatibility, but never raised by ``psycopg``. + """ + + __module__ = "psycopg" + + +class Error(Exception): + """ + Base exception for all the errors psycopg will raise. + + Exception that is the base class of all other error exceptions. You can + use this to catch all errors with one single `!except` statement. + + This exception is guaranteed to be picklable. + """ + + __module__ = "psycopg" + + sqlstate: Optional[str] = None + + def __init__( + self, + *args: Sequence[Any], + info: ErrorInfo = None, + encoding: str = "utf-8", + pgconn: Optional[PGconn] = None + ): + super().__init__(*args) + self._info = info + self._encoding = encoding + self._pgconn = pgconn + + # Handle sqlstate codes for which we don't have a class. + if not self.sqlstate and info: + self.sqlstate = self.diag.sqlstate + + @property + def pgconn(self) -> Optional[PGconn]: + """The connection object, if the error was raised from a connection attempt. + + :rtype: Optional[psycopg.pq.PGconn] + """ + return self._pgconn if self._pgconn else None + + @property + def pgresult(self) -> Optional[PGresult]: + """The result object, if the exception was raised after a failed query. 
+ + :rtype: Optional[psycopg.pq.PGresult] + """ + return self._info if _is_pgresult(self._info) else None + + @property + def diag(self) -> "Diagnostic": + """ + A `Diagnostic` object to inspect details of the errors from the database. + """ + return Diagnostic(self._info, encoding=self._encoding) + + def __reduce__(self) -> Union[str, Tuple[Any, ...]]: + res = super().__reduce__() + if isinstance(res, tuple) and len(res) >= 3: + # To make the exception picklable + res[2]["_info"] = _info_to_dict(self._info) + res[2]["_pgconn"] = None + + return res + + +class InterfaceError(Error): + """ + An error related to the database interface rather than the database itself. + """ + + __module__ = "psycopg" + + +class DatabaseError(Error): + """ + Exception raised for errors that are related to the database. + """ + + __module__ = "psycopg" + + def __init_subclass__(cls, code: Optional[str] = None, name: Optional[str] = None): + if code: + _sqlcodes[code] = cls + cls.sqlstate = code + if name: + _sqlcodes[name] = cls + + +class DataError(DatabaseError): + """ + An error caused by problems with the processed data. + + Examples may be division by zero, numeric value out of range, etc. + """ + + __module__ = "psycopg" + + +class OperationalError(DatabaseError): + """ + An error related to the database's operation. + + These errors are not necessarily under the control of the programmer, e.g. + an unexpected disconnect occurs, the data source name is not found, a + transaction could not be processed, a memory allocation error occurred + during processing, etc. + """ + + __module__ = "psycopg" + + +class IntegrityError(DatabaseError): + """ + An error caused when the relational integrity of the database is affected. + + An example may be a foreign key check failed. 
+ """ + + __module__ = "psycopg" + + +class InternalError(DatabaseError): + """ + An error generated when the database encounters an internal error, + + Examples could be the cursor is not valid anymore, the transaction is out + of sync, etc. + """ + + __module__ = "psycopg" + + +class ProgrammingError(DatabaseError): + """ + Exception raised for programming errors + + Examples may be table not found or already exists, syntax error in the SQL + statement, wrong number of parameters specified, etc. + """ + + __module__ = "psycopg" + + +class NotSupportedError(DatabaseError): + """ + A method or database API was used which is not supported by the database. + """ + + __module__ = "psycopg" + + +class ConnectionTimeout(OperationalError): + """ + Exception raised on timeout of the `~psycopg.Connection.connect()` method. + + The error is raised if the ``connect_timeout`` is specified and a + connection is not obtained in useful time. + + Subclass of `~psycopg.OperationalError`. + """ + + +class PipelineAborted(OperationalError): + """ + Raised when a operation fails because the current pipeline is in aborted state. + + Subclass of `~psycopg.OperationalError`. 
+ """ + + +class Diagnostic: + """Details from a database error report.""" + + def __init__(self, info: ErrorInfo, encoding: str = "utf-8"): + self._info = info + self._encoding = encoding + + @property + def severity(self) -> Optional[str]: + return self._error_message(DiagnosticField.SEVERITY) + + @property + def severity_nonlocalized(self) -> Optional[str]: + return self._error_message(DiagnosticField.SEVERITY_NONLOCALIZED) + + @property + def sqlstate(self) -> Optional[str]: + return self._error_message(DiagnosticField.SQLSTATE) + + @property + def message_primary(self) -> Optional[str]: + return self._error_message(DiagnosticField.MESSAGE_PRIMARY) + + @property + def message_detail(self) -> Optional[str]: + return self._error_message(DiagnosticField.MESSAGE_DETAIL) + + @property + def message_hint(self) -> Optional[str]: + return self._error_message(DiagnosticField.MESSAGE_HINT) + + @property + def statement_position(self) -> Optional[str]: + return self._error_message(DiagnosticField.STATEMENT_POSITION) + + @property + def internal_position(self) -> Optional[str]: + return self._error_message(DiagnosticField.INTERNAL_POSITION) + + @property + def internal_query(self) -> Optional[str]: + return self._error_message(DiagnosticField.INTERNAL_QUERY) + + @property + def context(self) -> Optional[str]: + return self._error_message(DiagnosticField.CONTEXT) + + @property + def schema_name(self) -> Optional[str]: + return self._error_message(DiagnosticField.SCHEMA_NAME) + + @property + def table_name(self) -> Optional[str]: + return self._error_message(DiagnosticField.TABLE_NAME) + + @property + def column_name(self) -> Optional[str]: + return self._error_message(DiagnosticField.COLUMN_NAME) + + @property + def datatype_name(self) -> Optional[str]: + return self._error_message(DiagnosticField.DATATYPE_NAME) + + @property + def constraint_name(self) -> Optional[str]: + return self._error_message(DiagnosticField.CONSTRAINT_NAME) + + @property + def source_file(self) -> 
Optional[str]: + return self._error_message(DiagnosticField.SOURCE_FILE) + + @property + def source_line(self) -> Optional[str]: + return self._error_message(DiagnosticField.SOURCE_LINE) + + @property + def source_function(self) -> Optional[str]: + return self._error_message(DiagnosticField.SOURCE_FUNCTION) + + def _error_message(self, field: DiagnosticField) -> Optional[str]: + if self._info: + if isinstance(self._info, dict): + val = self._info.get(field) + else: + val = self._info.error_field(field) + + if val is not None: + return val.decode(self._encoding, "replace") + + return None + + def __reduce__(self) -> Union[str, Tuple[Any, ...]]: + res = super().__reduce__() + if isinstance(res, tuple) and len(res) >= 3: + res[2]["_info"] = _info_to_dict(self._info) + + return res + + +def _info_to_dict(info: ErrorInfo) -> ErrorInfo: + """ + Convert a PGresult to a dictionary to make the info picklable. + """ + # PGresult is a protocol, can't use isinstance + if _is_pgresult(info): + return {v: info.error_field(v) for v in DiagnosticField} + else: + return info + + +def lookup(sqlstate: str) -> Type[Error]: + """Lookup an error code or `constant name`__ and return its exception class. + + Raise `!KeyError` if the code is not found. + + .. 
__: https://www.postgresql.org/docs/current/errcodes-appendix.html + #ERRCODES-TABLE + """ + return _sqlcodes[sqlstate.upper()] + + +def error_from_result(result: PGresult, encoding: str = "utf-8") -> Error: + from psycopg import pq + + state = result.error_field(DiagnosticField.SQLSTATE) or b"" + cls = _class_for_state(state.decode("ascii")) + return cls( + pq.error_message(result, encoding=encoding), + info=result, + encoding=encoding, + ) + + +def _is_pgresult(info: ErrorInfo) -> TypeGuard[PGresult]: + """Return True if an ErrorInfo is a PGresult instance.""" + # PGresult is a protocol, can't use isinstance + return hasattr(info, "error_field") + + +def _class_for_state(sqlstate: str) -> Type[Error]: + try: + return lookup(sqlstate) + except KeyError: + return get_base_exception(sqlstate) + + +def get_base_exception(sqlstate: str) -> Type[Error]: + return ( + _base_exc_map.get(sqlstate[:2]) + or _base_exc_map.get(sqlstate[:1]) + or DatabaseError + ) + + +_base_exc_map = { + "08": OperationalError, # Connection Exception + "0A": NotSupportedError, # Feature Not Supported + "20": ProgrammingError, # Case Not Foud + "21": ProgrammingError, # Cardinality Violation + "22": DataError, # Data Exception + "23": IntegrityError, # Integrity Constraint Violation + "24": InternalError, # Invalid Cursor State + "25": InternalError, # Invalid Transaction State + "26": ProgrammingError, # Invalid SQL Statement Name * + "27": OperationalError, # Triggered Data Change Violation + "28": OperationalError, # Invalid Authorization Specification + "2B": InternalError, # Dependent Privilege Descriptors Still Exist + "2D": InternalError, # Invalid Transaction Termination + "2F": OperationalError, # SQL Routine Exception * + "34": ProgrammingError, # Invalid Cursor Name * + "38": OperationalError, # External Routine Exception * + "39": OperationalError, # External Routine Invocation Exception * + "3B": OperationalError, # Savepoint Exception * + "3D": ProgrammingError, # Invalid Catalog 
Name + "3F": ProgrammingError, # Invalid Schema Name + "40": OperationalError, # Transaction Rollback + "42": ProgrammingError, # Syntax Error or Access Rule Violation + "44": ProgrammingError, # WITH CHECK OPTION Violation + "53": OperationalError, # Insufficient Resources + "54": OperationalError, # Program Limit Exceeded + "55": OperationalError, # Object Not In Prerequisite State + "57": OperationalError, # Operator Intervention + "58": OperationalError, # System Error (errors external to PostgreSQL itself) + "F": OperationalError, # Configuration File Error + "H": OperationalError, # Foreign Data Wrapper Error (SQL/MED) + "P": ProgrammingError, # PL/pgSQL Error + "X": InternalError, # Internal Error +} + + +# Error classes generated by tools/update_errors.py + +# fmt: off +# autogenerated: start + + +# Class 02 - No Data (this is also a warning class per the SQL standard) + +class NoData(DatabaseError, + code='02000', name='NO_DATA'): + pass + +class NoAdditionalDynamicResultSetsReturned(DatabaseError, + code='02001', name='NO_ADDITIONAL_DYNAMIC_RESULT_SETS_RETURNED'): + pass + + +# Class 03 - SQL Statement Not Yet Complete + +class SqlStatementNotYetComplete(DatabaseError, + code='03000', name='SQL_STATEMENT_NOT_YET_COMPLETE'): + pass + + +# Class 08 - Connection Exception + +class ConnectionException(OperationalError, + code='08000', name='CONNECTION_EXCEPTION'): + pass + +class SqlclientUnableToEstablishSqlconnection(OperationalError, + code='08001', name='SQLCLIENT_UNABLE_TO_ESTABLISH_SQLCONNECTION'): + pass + +class ConnectionDoesNotExist(OperationalError, + code='08003', name='CONNECTION_DOES_NOT_EXIST'): + pass + +class SqlserverRejectedEstablishmentOfSqlconnection(OperationalError, + code='08004', name='SQLSERVER_REJECTED_ESTABLISHMENT_OF_SQLCONNECTION'): + pass + +class ConnectionFailure(OperationalError, + code='08006', name='CONNECTION_FAILURE'): + pass + +class TransactionResolutionUnknown(OperationalError, + code='08007', 
name='TRANSACTION_RESOLUTION_UNKNOWN'): + pass + +class ProtocolViolation(OperationalError, + code='08P01', name='PROTOCOL_VIOLATION'): + pass + + +# Class 09 - Triggered Action Exception + +class TriggeredActionException(DatabaseError, + code='09000', name='TRIGGERED_ACTION_EXCEPTION'): + pass + + +# Class 0A - Feature Not Supported + +class FeatureNotSupported(NotSupportedError, + code='0A000', name='FEATURE_NOT_SUPPORTED'): + pass + + +# Class 0B - Invalid Transaction Initiation + +class InvalidTransactionInitiation(DatabaseError, + code='0B000', name='INVALID_TRANSACTION_INITIATION'): + pass + + +# Class 0F - Locator Exception + +class LocatorException(DatabaseError, + code='0F000', name='LOCATOR_EXCEPTION'): + pass + +class InvalidLocatorSpecification(DatabaseError, + code='0F001', name='INVALID_LOCATOR_SPECIFICATION'): + pass + + +# Class 0L - Invalid Grantor + +class InvalidGrantor(DatabaseError, + code='0L000', name='INVALID_GRANTOR'): + pass + +class InvalidGrantOperation(DatabaseError, + code='0LP01', name='INVALID_GRANT_OPERATION'): + pass + + +# Class 0P - Invalid Role Specification + +class InvalidRoleSpecification(DatabaseError, + code='0P000', name='INVALID_ROLE_SPECIFICATION'): + pass + + +# Class 0Z - Diagnostics Exception + +class DiagnosticsException(DatabaseError, + code='0Z000', name='DIAGNOSTICS_EXCEPTION'): + pass + +class StackedDiagnosticsAccessedWithoutActiveHandler(DatabaseError, + code='0Z002', name='STACKED_DIAGNOSTICS_ACCESSED_WITHOUT_ACTIVE_HANDLER'): + pass + + +# Class 20 - Case Not Found + +class CaseNotFound(ProgrammingError, + code='20000', name='CASE_NOT_FOUND'): + pass + + +# Class 21 - Cardinality Violation + +class CardinalityViolation(ProgrammingError, + code='21000', name='CARDINALITY_VIOLATION'): + pass + + +# Class 22 - Data Exception + +class DataException(DataError, + code='22000', name='DATA_EXCEPTION'): + pass + +class StringDataRightTruncation(DataError, + code='22001', name='STRING_DATA_RIGHT_TRUNCATION'): + pass + 
+class NullValueNoIndicatorParameter(DataError, + code='22002', name='NULL_VALUE_NO_INDICATOR_PARAMETER'): + pass + +class NumericValueOutOfRange(DataError, + code='22003', name='NUMERIC_VALUE_OUT_OF_RANGE'): + pass + +class NullValueNotAllowed(DataError, + code='22004', name='NULL_VALUE_NOT_ALLOWED'): + pass + +class ErrorInAssignment(DataError, + code='22005', name='ERROR_IN_ASSIGNMENT'): + pass + +class InvalidDatetimeFormat(DataError, + code='22007', name='INVALID_DATETIME_FORMAT'): + pass + +class DatetimeFieldOverflow(DataError, + code='22008', name='DATETIME_FIELD_OVERFLOW'): + pass + +class InvalidTimeZoneDisplacementValue(DataError, + code='22009', name='INVALID_TIME_ZONE_DISPLACEMENT_VALUE'): + pass + +class EscapeCharacterConflict(DataError, + code='2200B', name='ESCAPE_CHARACTER_CONFLICT'): + pass + +class InvalidUseOfEscapeCharacter(DataError, + code='2200C', name='INVALID_USE_OF_ESCAPE_CHARACTER'): + pass + +class InvalidEscapeOctet(DataError, + code='2200D', name='INVALID_ESCAPE_OCTET'): + pass + +class ZeroLengthCharacterString(DataError, + code='2200F', name='ZERO_LENGTH_CHARACTER_STRING'): + pass + +class MostSpecificTypeMismatch(DataError, + code='2200G', name='MOST_SPECIFIC_TYPE_MISMATCH'): + pass + +class SequenceGeneratorLimitExceeded(DataError, + code='2200H', name='SEQUENCE_GENERATOR_LIMIT_EXCEEDED'): + pass + +class NotAnXmlDocument(DataError, + code='2200L', name='NOT_AN_XML_DOCUMENT'): + pass + +class InvalidXmlDocument(DataError, + code='2200M', name='INVALID_XML_DOCUMENT'): + pass + +class InvalidXmlContent(DataError, + code='2200N', name='INVALID_XML_CONTENT'): + pass + +class InvalidXmlComment(DataError, + code='2200S', name='INVALID_XML_COMMENT'): + pass + +class InvalidXmlProcessingInstruction(DataError, + code='2200T', name='INVALID_XML_PROCESSING_INSTRUCTION'): + pass + +class InvalidIndicatorParameterValue(DataError, + code='22010', name='INVALID_INDICATOR_PARAMETER_VALUE'): + pass + +class SubstringError(DataError, + 
code='22011', name='SUBSTRING_ERROR'): + pass + +class DivisionByZero(DataError, + code='22012', name='DIVISION_BY_ZERO'): + pass + +class InvalidPrecedingOrFollowingSize(DataError, + code='22013', name='INVALID_PRECEDING_OR_FOLLOWING_SIZE'): + pass + +class InvalidArgumentForNtileFunction(DataError, + code='22014', name='INVALID_ARGUMENT_FOR_NTILE_FUNCTION'): + pass + +class IntervalFieldOverflow(DataError, + code='22015', name='INTERVAL_FIELD_OVERFLOW'): + pass + +class InvalidArgumentForNthValueFunction(DataError, + code='22016', name='INVALID_ARGUMENT_FOR_NTH_VALUE_FUNCTION'): + pass + +class InvalidCharacterValueForCast(DataError, + code='22018', name='INVALID_CHARACTER_VALUE_FOR_CAST'): + pass + +class InvalidEscapeCharacter(DataError, + code='22019', name='INVALID_ESCAPE_CHARACTER'): + pass + +class InvalidRegularExpression(DataError, + code='2201B', name='INVALID_REGULAR_EXPRESSION'): + pass + +class InvalidArgumentForLogarithm(DataError, + code='2201E', name='INVALID_ARGUMENT_FOR_LOGARITHM'): + pass + +class InvalidArgumentForPowerFunction(DataError, + code='2201F', name='INVALID_ARGUMENT_FOR_POWER_FUNCTION'): + pass + +class InvalidArgumentForWidthBucketFunction(DataError, + code='2201G', name='INVALID_ARGUMENT_FOR_WIDTH_BUCKET_FUNCTION'): + pass + +class InvalidRowCountInLimitClause(DataError, + code='2201W', name='INVALID_ROW_COUNT_IN_LIMIT_CLAUSE'): + pass + +class InvalidRowCountInResultOffsetClause(DataError, + code='2201X', name='INVALID_ROW_COUNT_IN_RESULT_OFFSET_CLAUSE'): + pass + +class CharacterNotInRepertoire(DataError, + code='22021', name='CHARACTER_NOT_IN_REPERTOIRE'): + pass + +class IndicatorOverflow(DataError, + code='22022', name='INDICATOR_OVERFLOW'): + pass + +class InvalidParameterValue(DataError, + code='22023', name='INVALID_PARAMETER_VALUE'): + pass + +class UnterminatedCString(DataError, + code='22024', name='UNTERMINATED_C_STRING'): + pass + +class InvalidEscapeSequence(DataError, + code='22025', name='INVALID_ESCAPE_SEQUENCE'): 
+ pass + +class StringDataLengthMismatch(DataError, + code='22026', name='STRING_DATA_LENGTH_MISMATCH'): + pass + +class TrimError(DataError, + code='22027', name='TRIM_ERROR'): + pass + +class ArraySubscriptError(DataError, + code='2202E', name='ARRAY_SUBSCRIPT_ERROR'): + pass + +class InvalidTablesampleRepeat(DataError, + code='2202G', name='INVALID_TABLESAMPLE_REPEAT'): + pass + +class InvalidTablesampleArgument(DataError, + code='2202H', name='INVALID_TABLESAMPLE_ARGUMENT'): + pass + +class DuplicateJsonObjectKeyValue(DataError, + code='22030', name='DUPLICATE_JSON_OBJECT_KEY_VALUE'): + pass + +class InvalidArgumentForSqlJsonDatetimeFunction(DataError, + code='22031', name='INVALID_ARGUMENT_FOR_SQL_JSON_DATETIME_FUNCTION'): + pass + +class InvalidJsonText(DataError, + code='22032', name='INVALID_JSON_TEXT'): + pass + +class InvalidSqlJsonSubscript(DataError, + code='22033', name='INVALID_SQL_JSON_SUBSCRIPT'): + pass + +class MoreThanOneSqlJsonItem(DataError, + code='22034', name='MORE_THAN_ONE_SQL_JSON_ITEM'): + pass + +class NoSqlJsonItem(DataError, + code='22035', name='NO_SQL_JSON_ITEM'): + pass + +class NonNumericSqlJsonItem(DataError, + code='22036', name='NON_NUMERIC_SQL_JSON_ITEM'): + pass + +class NonUniqueKeysInAJsonObject(DataError, + code='22037', name='NON_UNIQUE_KEYS_IN_A_JSON_OBJECT'): + pass + +class SingletonSqlJsonItemRequired(DataError, + code='22038', name='SINGLETON_SQL_JSON_ITEM_REQUIRED'): + pass + +class SqlJsonArrayNotFound(DataError, + code='22039', name='SQL_JSON_ARRAY_NOT_FOUND'): + pass + +class SqlJsonMemberNotFound(DataError, + code='2203A', name='SQL_JSON_MEMBER_NOT_FOUND'): + pass + +class SqlJsonNumberNotFound(DataError, + code='2203B', name='SQL_JSON_NUMBER_NOT_FOUND'): + pass + +class SqlJsonObjectNotFound(DataError, + code='2203C', name='SQL_JSON_OBJECT_NOT_FOUND'): + pass + +class TooManyJsonArrayElements(DataError, + code='2203D', name='TOO_MANY_JSON_ARRAY_ELEMENTS'): + pass + +class TooManyJsonObjectMembers(DataError, + 
code='2203E', name='TOO_MANY_JSON_OBJECT_MEMBERS'): + pass + +class SqlJsonScalarRequired(DataError, + code='2203F', name='SQL_JSON_SCALAR_REQUIRED'): + pass + +class SqlJsonItemCannotBeCastToTargetType(DataError, + code='2203G', name='SQL_JSON_ITEM_CANNOT_BE_CAST_TO_TARGET_TYPE'): + pass + +class FloatingPointException(DataError, + code='22P01', name='FLOATING_POINT_EXCEPTION'): + pass + +class InvalidTextRepresentation(DataError, + code='22P02', name='INVALID_TEXT_REPRESENTATION'): + pass + +class InvalidBinaryRepresentation(DataError, + code='22P03', name='INVALID_BINARY_REPRESENTATION'): + pass + +class BadCopyFileFormat(DataError, + code='22P04', name='BAD_COPY_FILE_FORMAT'): + pass + +class UntranslatableCharacter(DataError, + code='22P05', name='UNTRANSLATABLE_CHARACTER'): + pass + +class NonstandardUseOfEscapeCharacter(DataError, + code='22P06', name='NONSTANDARD_USE_OF_ESCAPE_CHARACTER'): + pass + + +# Class 23 - Integrity Constraint Violation + +class IntegrityConstraintViolation(IntegrityError, + code='23000', name='INTEGRITY_CONSTRAINT_VIOLATION'): + pass + +class RestrictViolation(IntegrityError, + code='23001', name='RESTRICT_VIOLATION'): + pass + +class NotNullViolation(IntegrityError, + code='23502', name='NOT_NULL_VIOLATION'): + pass + +class ForeignKeyViolation(IntegrityError, + code='23503', name='FOREIGN_KEY_VIOLATION'): + pass + +class UniqueViolation(IntegrityError, + code='23505', name='UNIQUE_VIOLATION'): + pass + +class CheckViolation(IntegrityError, + code='23514', name='CHECK_VIOLATION'): + pass + +class ExclusionViolation(IntegrityError, + code='23P01', name='EXCLUSION_VIOLATION'): + pass + + +# Class 24 - Invalid Cursor State + +class InvalidCursorState(InternalError, + code='24000', name='INVALID_CURSOR_STATE'): + pass + + +# Class 25 - Invalid Transaction State + +class InvalidTransactionState(InternalError, + code='25000', name='INVALID_TRANSACTION_STATE'): + pass + +class ActiveSqlTransaction(InternalError, + code='25001', 
name='ACTIVE_SQL_TRANSACTION'): + pass + +class BranchTransactionAlreadyActive(InternalError, + code='25002', name='BRANCH_TRANSACTION_ALREADY_ACTIVE'): + pass + +class InappropriateAccessModeForBranchTransaction(InternalError, + code='25003', name='INAPPROPRIATE_ACCESS_MODE_FOR_BRANCH_TRANSACTION'): + pass + +class InappropriateIsolationLevelForBranchTransaction(InternalError, + code='25004', name='INAPPROPRIATE_ISOLATION_LEVEL_FOR_BRANCH_TRANSACTION'): + pass + +class NoActiveSqlTransactionForBranchTransaction(InternalError, + code='25005', name='NO_ACTIVE_SQL_TRANSACTION_FOR_BRANCH_TRANSACTION'): + pass + +class ReadOnlySqlTransaction(InternalError, + code='25006', name='READ_ONLY_SQL_TRANSACTION'): + pass + +class SchemaAndDataStatementMixingNotSupported(InternalError, + code='25007', name='SCHEMA_AND_DATA_STATEMENT_MIXING_NOT_SUPPORTED'): + pass + +class HeldCursorRequiresSameIsolationLevel(InternalError, + code='25008', name='HELD_CURSOR_REQUIRES_SAME_ISOLATION_LEVEL'): + pass + +class NoActiveSqlTransaction(InternalError, + code='25P01', name='NO_ACTIVE_SQL_TRANSACTION'): + pass + +class InFailedSqlTransaction(InternalError, + code='25P02', name='IN_FAILED_SQL_TRANSACTION'): + pass + +class IdleInTransactionSessionTimeout(InternalError, + code='25P03', name='IDLE_IN_TRANSACTION_SESSION_TIMEOUT'): + pass + + +# Class 26 - Invalid SQL Statement Name + +class InvalidSqlStatementName(ProgrammingError, + code='26000', name='INVALID_SQL_STATEMENT_NAME'): + pass + + +# Class 27 - Triggered Data Change Violation + +class TriggeredDataChangeViolation(OperationalError, + code='27000', name='TRIGGERED_DATA_CHANGE_VIOLATION'): + pass + + +# Class 28 - Invalid Authorization Specification + +class InvalidAuthorizationSpecification(OperationalError, + code='28000', name='INVALID_AUTHORIZATION_SPECIFICATION'): + pass + +class InvalidPassword(OperationalError, + code='28P01', name='INVALID_PASSWORD'): + pass + + +# Class 2B - Dependent Privilege Descriptors Still Exist + 
+class DependentPrivilegeDescriptorsStillExist(InternalError, + code='2B000', name='DEPENDENT_PRIVILEGE_DESCRIPTORS_STILL_EXIST'): + pass + +class DependentObjectsStillExist(InternalError, + code='2BP01', name='DEPENDENT_OBJECTS_STILL_EXIST'): + pass + + +# Class 2D - Invalid Transaction Termination + +class InvalidTransactionTermination(InternalError, + code='2D000', name='INVALID_TRANSACTION_TERMINATION'): + pass + + +# Class 2F - SQL Routine Exception + +class SqlRoutineException(OperationalError, + code='2F000', name='SQL_ROUTINE_EXCEPTION'): + pass + +class ModifyingSqlDataNotPermitted(OperationalError, + code='2F002', name='MODIFYING_SQL_DATA_NOT_PERMITTED'): + pass + +class ProhibitedSqlStatementAttempted(OperationalError, + code='2F003', name='PROHIBITED_SQL_STATEMENT_ATTEMPTED'): + pass + +class ReadingSqlDataNotPermitted(OperationalError, + code='2F004', name='READING_SQL_DATA_NOT_PERMITTED'): + pass + +class FunctionExecutedNoReturnStatement(OperationalError, + code='2F005', name='FUNCTION_EXECUTED_NO_RETURN_STATEMENT'): + pass + + +# Class 34 - Invalid Cursor Name + +class InvalidCursorName(ProgrammingError, + code='34000', name='INVALID_CURSOR_NAME'): + pass + + +# Class 38 - External Routine Exception + +class ExternalRoutineException(OperationalError, + code='38000', name='EXTERNAL_ROUTINE_EXCEPTION'): + pass + +class ContainingSqlNotPermitted(OperationalError, + code='38001', name='CONTAINING_SQL_NOT_PERMITTED'): + pass + +class ModifyingSqlDataNotPermittedExt(OperationalError, + code='38002', name='MODIFYING_SQL_DATA_NOT_PERMITTED'): + pass + +class ProhibitedSqlStatementAttemptedExt(OperationalError, + code='38003', name='PROHIBITED_SQL_STATEMENT_ATTEMPTED'): + pass + +class ReadingSqlDataNotPermittedExt(OperationalError, + code='38004', name='READING_SQL_DATA_NOT_PERMITTED'): + pass + + +# Class 39 - External Routine Invocation Exception + +class ExternalRoutineInvocationException(OperationalError, + code='39000', 
name='EXTERNAL_ROUTINE_INVOCATION_EXCEPTION'): + pass + +class InvalidSqlstateReturned(OperationalError, + code='39001', name='INVALID_SQLSTATE_RETURNED'): + pass + +class NullValueNotAllowedExt(OperationalError, + code='39004', name='NULL_VALUE_NOT_ALLOWED'): + pass + +class TriggerProtocolViolated(OperationalError, + code='39P01', name='TRIGGER_PROTOCOL_VIOLATED'): + pass + +class SrfProtocolViolated(OperationalError, + code='39P02', name='SRF_PROTOCOL_VIOLATED'): + pass + +class EventTriggerProtocolViolated(OperationalError, + code='39P03', name='EVENT_TRIGGER_PROTOCOL_VIOLATED'): + pass + + +# Class 3B - Savepoint Exception + +class SavepointException(OperationalError, + code='3B000', name='SAVEPOINT_EXCEPTION'): + pass + +class InvalidSavepointSpecification(OperationalError, + code='3B001', name='INVALID_SAVEPOINT_SPECIFICATION'): + pass + + +# Class 3D - Invalid Catalog Name + +class InvalidCatalogName(ProgrammingError, + code='3D000', name='INVALID_CATALOG_NAME'): + pass + + +# Class 3F - Invalid Schema Name + +class InvalidSchemaName(ProgrammingError, + code='3F000', name='INVALID_SCHEMA_NAME'): + pass + + +# Class 40 - Transaction Rollback + +class TransactionRollback(OperationalError, + code='40000', name='TRANSACTION_ROLLBACK'): + pass + +class SerializationFailure(OperationalError, + code='40001', name='SERIALIZATION_FAILURE'): + pass + +class TransactionIntegrityConstraintViolation(OperationalError, + code='40002', name='TRANSACTION_INTEGRITY_CONSTRAINT_VIOLATION'): + pass + +class StatementCompletionUnknown(OperationalError, + code='40003', name='STATEMENT_COMPLETION_UNKNOWN'): + pass + +class DeadlockDetected(OperationalError, + code='40P01', name='DEADLOCK_DETECTED'): + pass + + +# Class 42 - Syntax Error or Access Rule Violation + +class SyntaxErrorOrAccessRuleViolation(ProgrammingError, + code='42000', name='SYNTAX_ERROR_OR_ACCESS_RULE_VIOLATION'): + pass + +class InsufficientPrivilege(ProgrammingError, + code='42501', 
name='INSUFFICIENT_PRIVILEGE'): + pass + +class SyntaxError(ProgrammingError, + code='42601', name='SYNTAX_ERROR'): + pass + +class InvalidName(ProgrammingError, + code='42602', name='INVALID_NAME'): + pass + +class InvalidColumnDefinition(ProgrammingError, + code='42611', name='INVALID_COLUMN_DEFINITION'): + pass + +class NameTooLong(ProgrammingError, + code='42622', name='NAME_TOO_LONG'): + pass + +class DuplicateColumn(ProgrammingError, + code='42701', name='DUPLICATE_COLUMN'): + pass + +class AmbiguousColumn(ProgrammingError, + code='42702', name='AMBIGUOUS_COLUMN'): + pass + +class UndefinedColumn(ProgrammingError, + code='42703', name='UNDEFINED_COLUMN'): + pass + +class UndefinedObject(ProgrammingError, + code='42704', name='UNDEFINED_OBJECT'): + pass + +class DuplicateObject(ProgrammingError, + code='42710', name='DUPLICATE_OBJECT'): + pass + +class DuplicateAlias(ProgrammingError, + code='42712', name='DUPLICATE_ALIAS'): + pass + +class DuplicateFunction(ProgrammingError, + code='42723', name='DUPLICATE_FUNCTION'): + pass + +class AmbiguousFunction(ProgrammingError, + code='42725', name='AMBIGUOUS_FUNCTION'): + pass + +class GroupingError(ProgrammingError, + code='42803', name='GROUPING_ERROR'): + pass + +class DatatypeMismatch(ProgrammingError, + code='42804', name='DATATYPE_MISMATCH'): + pass + +class WrongObjectType(ProgrammingError, + code='42809', name='WRONG_OBJECT_TYPE'): + pass + +class InvalidForeignKey(ProgrammingError, + code='42830', name='INVALID_FOREIGN_KEY'): + pass + +class CannotCoerce(ProgrammingError, + code='42846', name='CANNOT_COERCE'): + pass + +class UndefinedFunction(ProgrammingError, + code='42883', name='UNDEFINED_FUNCTION'): + pass + +class GeneratedAlways(ProgrammingError, + code='428C9', name='GENERATED_ALWAYS'): + pass + +class ReservedName(ProgrammingError, + code='42939', name='RESERVED_NAME'): + pass + +class UndefinedTable(ProgrammingError, + code='42P01', name='UNDEFINED_TABLE'): + pass + +class 
UndefinedParameter(ProgrammingError, + code='42P02', name='UNDEFINED_PARAMETER'): + pass + +class DuplicateCursor(ProgrammingError, + code='42P03', name='DUPLICATE_CURSOR'): + pass + +class DuplicateDatabase(ProgrammingError, + code='42P04', name='DUPLICATE_DATABASE'): + pass + +class DuplicatePreparedStatement(ProgrammingError, + code='42P05', name='DUPLICATE_PREPARED_STATEMENT'): + pass + +class DuplicateSchema(ProgrammingError, + code='42P06', name='DUPLICATE_SCHEMA'): + pass + +class DuplicateTable(ProgrammingError, + code='42P07', name='DUPLICATE_TABLE'): + pass + +class AmbiguousParameter(ProgrammingError, + code='42P08', name='AMBIGUOUS_PARAMETER'): + pass + +class AmbiguousAlias(ProgrammingError, + code='42P09', name='AMBIGUOUS_ALIAS'): + pass + +class InvalidColumnReference(ProgrammingError, + code='42P10', name='INVALID_COLUMN_REFERENCE'): + pass + +class InvalidCursorDefinition(ProgrammingError, + code='42P11', name='INVALID_CURSOR_DEFINITION'): + pass + +class InvalidDatabaseDefinition(ProgrammingError, + code='42P12', name='INVALID_DATABASE_DEFINITION'): + pass + +class InvalidFunctionDefinition(ProgrammingError, + code='42P13', name='INVALID_FUNCTION_DEFINITION'): + pass + +class InvalidPreparedStatementDefinition(ProgrammingError, + code='42P14', name='INVALID_PREPARED_STATEMENT_DEFINITION'): + pass + +class InvalidSchemaDefinition(ProgrammingError, + code='42P15', name='INVALID_SCHEMA_DEFINITION'): + pass + +class InvalidTableDefinition(ProgrammingError, + code='42P16', name='INVALID_TABLE_DEFINITION'): + pass + +class InvalidObjectDefinition(ProgrammingError, + code='42P17', name='INVALID_OBJECT_DEFINITION'): + pass + +class IndeterminateDatatype(ProgrammingError, + code='42P18', name='INDETERMINATE_DATATYPE'): + pass + +class InvalidRecursion(ProgrammingError, + code='42P19', name='INVALID_RECURSION'): + pass + +class WindowingError(ProgrammingError, + code='42P20', name='WINDOWING_ERROR'): + pass + +class CollationMismatch(ProgrammingError, + 
code='42P21', name='COLLATION_MISMATCH'): + pass + +class IndeterminateCollation(ProgrammingError, + code='42P22', name='INDETERMINATE_COLLATION'): + pass + + +# Class 44 - WITH CHECK OPTION Violation + +class WithCheckOptionViolation(ProgrammingError, + code='44000', name='WITH_CHECK_OPTION_VIOLATION'): + pass + + +# Class 53 - Insufficient Resources + +class InsufficientResources(OperationalError, + code='53000', name='INSUFFICIENT_RESOURCES'): + pass + +class DiskFull(OperationalError, + code='53100', name='DISK_FULL'): + pass + +class OutOfMemory(OperationalError, + code='53200', name='OUT_OF_MEMORY'): + pass + +class TooManyConnections(OperationalError, + code='53300', name='TOO_MANY_CONNECTIONS'): + pass + +class ConfigurationLimitExceeded(OperationalError, + code='53400', name='CONFIGURATION_LIMIT_EXCEEDED'): + pass + + +# Class 54 - Program Limit Exceeded + +class ProgramLimitExceeded(OperationalError, + code='54000', name='PROGRAM_LIMIT_EXCEEDED'): + pass + +class StatementTooComplex(OperationalError, + code='54001', name='STATEMENT_TOO_COMPLEX'): + pass + +class TooManyColumns(OperationalError, + code='54011', name='TOO_MANY_COLUMNS'): + pass + +class TooManyArguments(OperationalError, + code='54023', name='TOO_MANY_ARGUMENTS'): + pass + + +# Class 55 - Object Not In Prerequisite State + +class ObjectNotInPrerequisiteState(OperationalError, + code='55000', name='OBJECT_NOT_IN_PREREQUISITE_STATE'): + pass + +class ObjectInUse(OperationalError, + code='55006', name='OBJECT_IN_USE'): + pass + +class CantChangeRuntimeParam(OperationalError, + code='55P02', name='CANT_CHANGE_RUNTIME_PARAM'): + pass + +class LockNotAvailable(OperationalError, + code='55P03', name='LOCK_NOT_AVAILABLE'): + pass + +class UnsafeNewEnumValueUsage(OperationalError, + code='55P04', name='UNSAFE_NEW_ENUM_VALUE_USAGE'): + pass + + +# Class 57 - Operator Intervention + +class OperatorIntervention(OperationalError, + code='57000', name='OPERATOR_INTERVENTION'): + pass + +class 
QueryCanceled(OperationalError, + code='57014', name='QUERY_CANCELED'): + pass + +class AdminShutdown(OperationalError, + code='57P01', name='ADMIN_SHUTDOWN'): + pass + +class CrashShutdown(OperationalError, + code='57P02', name='CRASH_SHUTDOWN'): + pass + +class CannotConnectNow(OperationalError, + code='57P03', name='CANNOT_CONNECT_NOW'): + pass + +class DatabaseDropped(OperationalError, + code='57P04', name='DATABASE_DROPPED'): + pass + +class IdleSessionTimeout(OperationalError, + code='57P05', name='IDLE_SESSION_TIMEOUT'): + pass + + +# Class 58 - System Error (errors external to PostgreSQL itself) + +class SystemError(OperationalError, + code='58000', name='SYSTEM_ERROR'): + pass + +class IoError(OperationalError, + code='58030', name='IO_ERROR'): + pass + +class UndefinedFile(OperationalError, + code='58P01', name='UNDEFINED_FILE'): + pass + +class DuplicateFile(OperationalError, + code='58P02', name='DUPLICATE_FILE'): + pass + + +# Class 72 - Snapshot Failure + +class SnapshotTooOld(DatabaseError, + code='72000', name='SNAPSHOT_TOO_OLD'): + pass + + +# Class F0 - Configuration File Error + +class ConfigFileError(OperationalError, + code='F0000', name='CONFIG_FILE_ERROR'): + pass + +class LockFileExists(OperationalError, + code='F0001', name='LOCK_FILE_EXISTS'): + pass + + +# Class HV - Foreign Data Wrapper Error (SQL/MED) + +class FdwError(OperationalError, + code='HV000', name='FDW_ERROR'): + pass + +class FdwOutOfMemory(OperationalError, + code='HV001', name='FDW_OUT_OF_MEMORY'): + pass + +class FdwDynamicParameterValueNeeded(OperationalError, + code='HV002', name='FDW_DYNAMIC_PARAMETER_VALUE_NEEDED'): + pass + +class FdwInvalidDataType(OperationalError, + code='HV004', name='FDW_INVALID_DATA_TYPE'): + pass + +class FdwColumnNameNotFound(OperationalError, + code='HV005', name='FDW_COLUMN_NAME_NOT_FOUND'): + pass + +class FdwInvalidDataTypeDescriptors(OperationalError, + code='HV006', name='FDW_INVALID_DATA_TYPE_DESCRIPTORS'): + pass + +class 
FdwInvalidColumnName(OperationalError, + code='HV007', name='FDW_INVALID_COLUMN_NAME'): + pass + +class FdwInvalidColumnNumber(OperationalError, + code='HV008', name='FDW_INVALID_COLUMN_NUMBER'): + pass + +class FdwInvalidUseOfNullPointer(OperationalError, + code='HV009', name='FDW_INVALID_USE_OF_NULL_POINTER'): + pass + +class FdwInvalidStringFormat(OperationalError, + code='HV00A', name='FDW_INVALID_STRING_FORMAT'): + pass + +class FdwInvalidHandle(OperationalError, + code='HV00B', name='FDW_INVALID_HANDLE'): + pass + +class FdwInvalidOptionIndex(OperationalError, + code='HV00C', name='FDW_INVALID_OPTION_INDEX'): + pass + +class FdwInvalidOptionName(OperationalError, + code='HV00D', name='FDW_INVALID_OPTION_NAME'): + pass + +class FdwOptionNameNotFound(OperationalError, + code='HV00J', name='FDW_OPTION_NAME_NOT_FOUND'): + pass + +class FdwReplyHandle(OperationalError, + code='HV00K', name='FDW_REPLY_HANDLE'): + pass + +class FdwUnableToCreateExecution(OperationalError, + code='HV00L', name='FDW_UNABLE_TO_CREATE_EXECUTION'): + pass + +class FdwUnableToCreateReply(OperationalError, + code='HV00M', name='FDW_UNABLE_TO_CREATE_REPLY'): + pass + +class FdwUnableToEstablishConnection(OperationalError, + code='HV00N', name='FDW_UNABLE_TO_ESTABLISH_CONNECTION'): + pass + +class FdwNoSchemas(OperationalError, + code='HV00P', name='FDW_NO_SCHEMAS'): + pass + +class FdwSchemaNotFound(OperationalError, + code='HV00Q', name='FDW_SCHEMA_NOT_FOUND'): + pass + +class FdwTableNotFound(OperationalError, + code='HV00R', name='FDW_TABLE_NOT_FOUND'): + pass + +class FdwFunctionSequenceError(OperationalError, + code='HV010', name='FDW_FUNCTION_SEQUENCE_ERROR'): + pass + +class FdwTooManyHandles(OperationalError, + code='HV014', name='FDW_TOO_MANY_HANDLES'): + pass + +class FdwInconsistentDescriptorInformation(OperationalError, + code='HV021', name='FDW_INCONSISTENT_DESCRIPTOR_INFORMATION'): + pass + +class FdwInvalidAttributeValue(OperationalError, + code='HV024', 
name='FDW_INVALID_ATTRIBUTE_VALUE'): + pass + +class FdwInvalidStringLengthOrBufferLength(OperationalError, + code='HV090', name='FDW_INVALID_STRING_LENGTH_OR_BUFFER_LENGTH'): + pass + +class FdwInvalidDescriptorFieldIdentifier(OperationalError, + code='HV091', name='FDW_INVALID_DESCRIPTOR_FIELD_IDENTIFIER'): + pass + + +# Class P0 - PL/pgSQL Error + +class PlpgsqlError(ProgrammingError, + code='P0000', name='PLPGSQL_ERROR'): + pass + +class RaiseException(ProgrammingError, + code='P0001', name='RAISE_EXCEPTION'): + pass + +class NoDataFound(ProgrammingError, + code='P0002', name='NO_DATA_FOUND'): + pass + +class TooManyRows(ProgrammingError, + code='P0003', name='TOO_MANY_ROWS'): + pass + +class AssertFailure(ProgrammingError, + code='P0004', name='ASSERT_FAILURE'): + pass + + +# Class XX - Internal Error + +class InternalError_(InternalError, + code='XX000', name='INTERNAL_ERROR'): + pass + +class DataCorrupted(InternalError, + code='XX001', name='DATA_CORRUPTED'): + pass + +class IndexCorrupted(InternalError, + code='XX002', name='INDEX_CORRUPTED'): + pass + + +# autogenerated: end +# fmt: on + +# Don't show a complete traceback upon raising these exception. +# Usually the traceback starts from internal functions (for instance in the +# server communication callbacks) but, for the end user, it's more important +# to get the high level information about where the exception was raised, for +# instance in a certain `Cursor.execute()`. + +_NO_TRACEBACK = (Error, KeyboardInterrupt, CancelledError) diff --git a/lib/python3.11/site-packages/psycopg/generators.py b/lib/python3.11/site-packages/psycopg/generators.py new file mode 100644 index 0000000..584fe47 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/generators.py @@ -0,0 +1,320 @@ +""" +Generators implementing communication protocols with the libpq + +Certain operations (connection, querying) are an interleave of libpq calls and +waiting for the socket to be ready. 
This module contains the code to execute +the operations, yielding a polling state whenever there is to wait. The +functions in the `waiting` module are the ones who wait more or less +cooperatively for the socket to be ready and make these generators continue. + +All these generators yield pairs (fileno, `Wait`) whenever an operation would +block. The generator can be restarted sending the appropriate `Ready` state +when the file descriptor is ready. + +""" + +# Copyright (C) 2020 The Psycopg Team + +import logging +from typing import List, Optional, Union + +from . import pq +from . import errors as e +from .abc import Buffer, PipelineCommand, PQGen, PQGenConn +from .pq.abc import PGconn, PGresult +from .waiting import Wait, Ready +from ._compat import Deque +from ._cmodule import _psycopg +from ._encodings import pgconn_encoding, conninfo_encoding + +OK = pq.ConnStatus.OK +BAD = pq.ConnStatus.BAD + +POLL_OK = pq.PollingStatus.OK +POLL_READING = pq.PollingStatus.READING +POLL_WRITING = pq.PollingStatus.WRITING +POLL_FAILED = pq.PollingStatus.FAILED + +COMMAND_OK = pq.ExecStatus.COMMAND_OK +COPY_OUT = pq.ExecStatus.COPY_OUT +COPY_IN = pq.ExecStatus.COPY_IN +COPY_BOTH = pq.ExecStatus.COPY_BOTH +PIPELINE_SYNC = pq.ExecStatus.PIPELINE_SYNC + +WAIT_R = Wait.R +WAIT_W = Wait.W +WAIT_RW = Wait.RW +READY_R = Ready.R +READY_W = Ready.W +READY_RW = Ready.RW + +logger = logging.getLogger(__name__) + + +def _connect(conninfo: str) -> PQGenConn[PGconn]: + """ + Generator to create a database connection without blocking. 
+ + """ + conn = pq.PGconn.connect_start(conninfo.encode()) + while True: + if conn.status == BAD: + encoding = conninfo_encoding(conninfo) + raise e.OperationalError( + f"connection is bad: {pq.error_message(conn, encoding=encoding)}", + pgconn=conn, + ) + + status = conn.connect_poll() + if status == POLL_OK: + break + elif status == POLL_READING: + yield conn.socket, WAIT_R + elif status == POLL_WRITING: + yield conn.socket, WAIT_W + elif status == POLL_FAILED: + encoding = conninfo_encoding(conninfo) + raise e.OperationalError( + f"connection failed: {pq.error_message(conn, encoding=encoding)}", + pgconn=conn, + ) + else: + raise e.InternalError(f"unexpected poll status: {status}", pgconn=conn) + + conn.nonblocking = 1 + return conn + + +def _execute(pgconn: PGconn) -> PQGen[List[PGresult]]: + """ + Generator sending a query and returning results without blocking. + + The query must have already been sent using `pgconn.send_query()` or + similar. Flush the query and then return the result using nonblocking + functions. + + Return the list of results returned by the database (whether success + or error). + """ + yield from _send(pgconn) + rv = yield from _fetch_many(pgconn) + return rv + + +def _send(pgconn: PGconn) -> PQGen[None]: + """ + Generator to send a query to the server without blocking. + + The query must have already been sent using `pgconn.send_query()` or + similar. Flush the query and then return the result using nonblocking + functions. + + After this generator has finished you may want to cycle using `fetch()` + to retrieve the results available. + """ + while True: + f = pgconn.flush() + if f == 0: + break + + ready = yield WAIT_RW + if ready & READY_R: + # This call may read notifies: they will be saved in the + # PGconn buffer and passed to Python later, in `fetch()`. + pgconn.consume_input() + + +def _fetch_many(pgconn: PGconn) -> PQGen[List[PGresult]]: + """ + Generator retrieving results from the database without blocking. 
+ + The query must have already been sent to the server, so pgconn.flush() has + already returned 0. + + Return the list of results returned by the database (whether success + or error). + """ + results: List[PGresult] = [] + while True: + res = yield from _fetch(pgconn) + if not res: + break + + results.append(res) + status = res.status + if status == COPY_IN or status == COPY_OUT or status == COPY_BOTH: + # After entering copy mode the libpq will create a phony result + # for every request so let's break the endless loop. + break + + if status == PIPELINE_SYNC: + # PIPELINE_SYNC is not followed by a NULL, but we return it alone + # similarly to other result sets. + assert len(results) == 1, results + break + + return results + + +def _fetch(pgconn: PGconn) -> PQGen[Optional[PGresult]]: + """ + Generator retrieving a single result from the database without blocking. + + The query must have already been sent to the server, so pgconn.flush() has + already returned 0. + + Return a result from the database (whether success or error). + """ + if pgconn.is_busy(): + yield WAIT_R + while True: + pgconn.consume_input() + if not pgconn.is_busy(): + break + yield WAIT_R + + _consume_notifies(pgconn) + + return pgconn.get_result() + + +def _pipeline_communicate( + pgconn: PGconn, commands: Deque[PipelineCommand] +) -> PQGen[List[List[PGresult]]]: + """Generator to send queries from a connection in pipeline mode while also + receiving results. + + Return a list results, including single PIPELINE_SYNC elements. 
+ """ + results = [] + + while True: + ready = yield WAIT_RW + + if ready & READY_R: + pgconn.consume_input() + _consume_notifies(pgconn) + + res: List[PGresult] = [] + while not pgconn.is_busy(): + r = pgconn.get_result() + if r is None: + if not res: + break + results.append(res) + res = [] + elif r.status == PIPELINE_SYNC: + assert not res + results.append([r]) + else: + res.append(r) + + if ready & READY_W: + pgconn.flush() + if not commands: + break + commands.popleft()() + + return results + + +def _consume_notifies(pgconn: PGconn) -> None: + # Consume notifies + while True: + n = pgconn.notifies() + if not n: + break + if pgconn.notify_handler: + pgconn.notify_handler(n) + + +def notifies(pgconn: PGconn) -> PQGen[List[pq.PGnotify]]: + yield WAIT_R + pgconn.consume_input() + + ns = [] + while True: + n = pgconn.notifies() + if n: + ns.append(n) + else: + break + + return ns + + +def copy_from(pgconn: PGconn) -> PQGen[Union[memoryview, PGresult]]: + while True: + nbytes, data = pgconn.get_copy_data(1) + if nbytes != 0: + break + + # would block + yield WAIT_R + pgconn.consume_input() + + if nbytes > 0: + # some data + return data + + # Retrieve the final result of copy + results = yield from _fetch_many(pgconn) + if len(results) > 1: + # TODO: too brutal? Copy worked. + raise e.ProgrammingError("you cannot mix COPY with other operations") + result = results[0] + if result.status != COMMAND_OK: + encoding = pgconn_encoding(pgconn) + raise e.error_from_result(result, encoding=encoding) + + return result + + +def copy_to(pgconn: PGconn, buffer: Buffer) -> PQGen[None]: + # Retry enqueuing data until successful. + # + # WARNING! This can cause an infinite loop if the buffer is too large. (see + # ticket #255). We avoid it in the Copy object by splitting a large buffer + # into smaller ones. We prefer to do it there instead of here in order to + # do it upstream the queue decoupling the writer task from the producer one. 
+ while pgconn.put_copy_data(buffer) == 0: + yield WAIT_W + + +def copy_end(pgconn: PGconn, error: Optional[bytes]) -> PQGen[PGresult]: + # Retry enqueuing end copy message until successful + while pgconn.put_copy_end(error) == 0: + yield WAIT_W + + # Repeat until it the message is flushed to the server + while True: + yield WAIT_W + f = pgconn.flush() + if f == 0: + break + + # Retrieve the final result of copy + (result,) = yield from _fetch_many(pgconn) + if result.status != COMMAND_OK: + encoding = pgconn_encoding(pgconn) + raise e.error_from_result(result, encoding=encoding) + + return result + + +# Override functions with fast versions if available +if _psycopg: + connect = _psycopg.connect + execute = _psycopg.execute + send = _psycopg.send + fetch_many = _psycopg.fetch_many + fetch = _psycopg.fetch + pipeline_communicate = _psycopg.pipeline_communicate + +else: + connect = _connect + execute = _execute + send = _send + fetch_many = _fetch_many + fetch = _fetch + pipeline_communicate = _pipeline_communicate diff --git a/lib/python3.11/site-packages/psycopg/postgres.py b/lib/python3.11/site-packages/psycopg/postgres.py new file mode 100644 index 0000000..976533a --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/postgres.py @@ -0,0 +1,124 @@ +""" +Types configuration specific to PostgreSQL. +""" + +# Copyright (C) 2020 The Psycopg Team + +from ._typeinfo import TypeInfo, RangeInfo, MultirangeInfo, TypesRegistry +from .abc import AdaptContext +from ._adapters_map import AdaptersMap + +# Global objects with PostgreSQL builtins and globally registered user types. +types = TypesRegistry() + +# Global adapter maps with PostgreSQL types configuration +adapters = AdaptersMap(types=types) + +# Use tools/update_oids.py to update this data. 
+for t in [ + TypeInfo('"char"', 18, 1002), + # autogenerated: start + # Generated from PostgreSQL 15.0 + TypeInfo("aclitem", 1033, 1034), + TypeInfo("bit", 1560, 1561), + TypeInfo("bool", 16, 1000, regtype="boolean"), + TypeInfo("box", 603, 1020, delimiter=";"), + TypeInfo("bpchar", 1042, 1014, regtype="character"), + TypeInfo("bytea", 17, 1001), + TypeInfo("cid", 29, 1012), + TypeInfo("cidr", 650, 651), + TypeInfo("circle", 718, 719), + TypeInfo("date", 1082, 1182), + TypeInfo("float4", 700, 1021, regtype="real"), + TypeInfo("float8", 701, 1022, regtype="double precision"), + TypeInfo("gtsvector", 3642, 3644), + TypeInfo("inet", 869, 1041), + TypeInfo("int2", 21, 1005, regtype="smallint"), + TypeInfo("int2vector", 22, 1006), + TypeInfo("int4", 23, 1007, regtype="integer"), + TypeInfo("int8", 20, 1016, regtype="bigint"), + TypeInfo("interval", 1186, 1187), + TypeInfo("json", 114, 199), + TypeInfo("jsonb", 3802, 3807), + TypeInfo("jsonpath", 4072, 4073), + TypeInfo("line", 628, 629), + TypeInfo("lseg", 601, 1018), + TypeInfo("macaddr", 829, 1040), + TypeInfo("macaddr8", 774, 775), + TypeInfo("money", 790, 791), + TypeInfo("name", 19, 1003), + TypeInfo("numeric", 1700, 1231), + TypeInfo("oid", 26, 1028), + TypeInfo("oidvector", 30, 1013), + TypeInfo("path", 602, 1019), + TypeInfo("pg_lsn", 3220, 3221), + TypeInfo("point", 600, 1017), + TypeInfo("polygon", 604, 1027), + TypeInfo("record", 2249, 2287), + TypeInfo("refcursor", 1790, 2201), + TypeInfo("regclass", 2205, 2210), + TypeInfo("regcollation", 4191, 4192), + TypeInfo("regconfig", 3734, 3735), + TypeInfo("regdictionary", 3769, 3770), + TypeInfo("regnamespace", 4089, 4090), + TypeInfo("regoper", 2203, 2208), + TypeInfo("regoperator", 2204, 2209), + TypeInfo("regproc", 24, 1008), + TypeInfo("regprocedure", 2202, 2207), + TypeInfo("regrole", 4096, 4097), + TypeInfo("regtype", 2206, 2211), + TypeInfo("text", 25, 1009), + TypeInfo("tid", 27, 1010), + TypeInfo("time", 1083, 1183, regtype="time without time zone"), + 
TypeInfo("timestamp", 1114, 1115, regtype="timestamp without time zone"), + TypeInfo("timestamptz", 1184, 1185, regtype="timestamp with time zone"), + TypeInfo("timetz", 1266, 1270, regtype="time with time zone"), + TypeInfo("tsquery", 3615, 3645), + TypeInfo("tsvector", 3614, 3643), + TypeInfo("txid_snapshot", 2970, 2949), + TypeInfo("uuid", 2950, 2951), + TypeInfo("varbit", 1562, 1563, regtype="bit varying"), + TypeInfo("varchar", 1043, 1015, regtype="character varying"), + TypeInfo("xid", 28, 1011), + TypeInfo("xid8", 5069, 271), + TypeInfo("xml", 142, 143), + RangeInfo("daterange", 3912, 3913, subtype_oid=1082), + RangeInfo("int4range", 3904, 3905, subtype_oid=23), + RangeInfo("int8range", 3926, 3927, subtype_oid=20), + RangeInfo("numrange", 3906, 3907, subtype_oid=1700), + RangeInfo("tsrange", 3908, 3909, subtype_oid=1114), + RangeInfo("tstzrange", 3910, 3911, subtype_oid=1184), + MultirangeInfo("datemultirange", 4535, 6155, range_oid=3912, subtype_oid=1082), + MultirangeInfo("int4multirange", 4451, 6150, range_oid=3904, subtype_oid=23), + MultirangeInfo("int8multirange", 4536, 6157, range_oid=3926, subtype_oid=20), + MultirangeInfo("nummultirange", 4532, 6151, range_oid=3906, subtype_oid=1700), + MultirangeInfo("tsmultirange", 4533, 6152, range_oid=3908, subtype_oid=1114), + MultirangeInfo("tstzmultirange", 4534, 6153, range_oid=3910, subtype_oid=1184), + # autogenerated: end +]: + types.add(t) + + +# A few oids used a bit everywhere +INVALID_OID = 0 +TEXT_OID = types["text"].oid +TEXT_ARRAY_OID = types["text"].array_oid + + +def register_default_adapters(context: AdaptContext) -> None: + from .types import array, bool, composite, datetime, enum, json, multirange + from .types import net, none, numeric, range, string, uuid + + array.register_default_adapters(context) + bool.register_default_adapters(context) + composite.register_default_adapters(context) + datetime.register_default_adapters(context) + enum.register_default_adapters(context) + 
json.register_default_adapters(context) + multirange.register_default_adapters(context) + net.register_default_adapters(context) + none.register_default_adapters(context) + numeric.register_default_adapters(context) + range.register_default_adapters(context) + string.register_default_adapters(context) + uuid.register_default_adapters(context) diff --git a/lib/python3.11/site-packages/psycopg/pq/__init__.py b/lib/python3.11/site-packages/psycopg/pq/__init__.py new file mode 100644 index 0000000..0048ebb --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/pq/__init__.py @@ -0,0 +1,133 @@ +""" +psycopg libpq wrapper + +This package exposes the libpq functionalities as Python objects and functions. + +The real implementation (the binding to the C library) is +implementation-dependant but all the implementations share the same interface. +""" + +# Copyright (C) 2020 The Psycopg Team + +import os +import logging +from typing import Callable, List, Type + +from . import abc +from .misc import ConninfoOption, PGnotify, PGresAttDesc +from .misc import error_message +from ._enums import ConnStatus, DiagnosticField, ExecStatus, Format, Trace +from ._enums import Ping, PipelineStatus, PollingStatus, TransactionStatus + +logger = logging.getLogger(__name__) + +__impl__: str +"""The currently loaded implementation of the `!psycopg.pq` package. + +Possible values include ``python``, ``c``, ``binary``. +""" + +__build_version__: int +"""The libpq version the C package was built with. + +A number in the same format of `~psycopg.ConnectionInfo.server_version` +representing the libpq used to build the speedup module (``c``, ``binary``) if +available. + +Certain features might not be available if the built version is too old. 
+""" + +version: Callable[[], int] +PGconn: Type[abc.PGconn] +PGresult: Type[abc.PGresult] +Conninfo: Type[abc.Conninfo] +Escaping: Type[abc.Escaping] +PGcancel: Type[abc.PGcancel] + + +def import_from_libpq() -> None: + """ + Import pq objects implementation from the best libpq wrapper available. + + If an implementation is requested try to import only it, otherwise + try to import the best implementation available. + """ + # import these names into the module on success as side effect + global __impl__, version, __build_version__ + global PGconn, PGresult, Conninfo, Escaping, PGcancel + + impl = os.environ.get("PSYCOPG_IMPL", "").lower() + module = None + attempts: List[str] = [] + + def handle_error(name: str, e: Exception) -> None: + if not impl: + msg = f"couldn't import psycopg '{name}' implementation: {e}" + logger.debug(msg) + attempts.append(msg) + else: + msg = f"couldn't import requested psycopg '{name}' implementation: {e}" + raise ImportError(msg) from e + + # The best implementation: fast but requires the system libpq installed + if not impl or impl == "c": + try: + from psycopg_c import pq as module # type: ignore + except Exception as e: + handle_error("c", e) + + # Second best implementation: fast and stand-alone + if not module and (not impl or impl == "binary"): + try: + from psycopg_binary import pq as module # type: ignore + except Exception as e: + handle_error("binary", e) + + # Pure Python implementation, slow and requires the system libpq installed. + if not module and (not impl or impl == "python"): + try: + from . 
import pq_ctypes as module # type: ignore[assignment] + except Exception as e: + handle_error("python", e) + + if module: + __impl__ = module.__impl__ + version = module.version + PGconn = module.PGconn + PGresult = module.PGresult + Conninfo = module.Conninfo + Escaping = module.Escaping + PGcancel = module.PGcancel + __build_version__ = module.__build_version__ + elif impl: + raise ImportError(f"requested psycopg implementation '{impl}' unknown") + else: + sattempts = "\n".join(f"- {attempt}" for attempt in attempts) + raise ImportError( + f"""\ +no pq wrapper available. +Attempts made: +{sattempts}""" + ) + + +import_from_libpq() + +__all__ = ( + "ConnStatus", + "PipelineStatus", + "PollingStatus", + "TransactionStatus", + "ExecStatus", + "Ping", + "DiagnosticField", + "Format", + "Trace", + "PGconn", + "PGnotify", + "Conninfo", + "PGresAttDesc", + "error_message", + "ConninfoOption", + "version", +) diff --git a/lib/python3.11/site-packages/psycopg/pq/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/pq/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..2b9c986 Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/pq/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/pq/__pycache__/_debug.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/pq/__pycache__/_debug.cpython-311.pyc new file mode 100644 index 0000000..f48d96f Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/pq/__pycache__/_debug.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/pq/__pycache__/_enums.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/pq/__pycache__/_enums.cpython-311.pyc new file mode 100644 index 0000000..d6bfaa9 Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/pq/__pycache__/_enums.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/pq/__pycache__/_pq_ctypes.cpython-311.pyc 
b/lib/python3.11/site-packages/psycopg/pq/__pycache__/_pq_ctypes.cpython-311.pyc new file mode 100644 index 0000000..27b4cf5 Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/pq/__pycache__/_pq_ctypes.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/pq/__pycache__/abc.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/pq/__pycache__/abc.cpython-311.pyc new file mode 100644 index 0000000..19fe3b9 Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/pq/__pycache__/abc.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/pq/__pycache__/misc.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/pq/__pycache__/misc.cpython-311.pyc new file mode 100644 index 0000000..82d3581 Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/pq/__pycache__/misc.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/pq/__pycache__/pq_ctypes.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/pq/__pycache__/pq_ctypes.cpython-311.pyc new file mode 100644 index 0000000..c47b8ee Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/pq/__pycache__/pq_ctypes.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/pq/_debug.py b/lib/python3.11/site-packages/psycopg/pq/_debug.py new file mode 100644 index 0000000..f86f3bd --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/pq/_debug.py @@ -0,0 +1,106 @@ +""" +libpq debugging tools + +These functionalities are exposed here for convenience, but are not part of +the public interface and are subject to change at any moment. 
+ +Suggested usage:: + + import logging + import psycopg + from psycopg import pq + from psycopg.pq._debug import PGconnDebug + + logging.basicConfig(level=logging.INFO, format="%(message)s") + logger = logging.getLogger("psycopg.debug") + logger.setLevel(logging.INFO) + + assert pq.__impl__ == "python" + pq.PGconn = PGconnDebug + + with psycopg.connect("") as conn: + conn.pgconn.trace(2) + conn.pgconn.set_trace_flags( + pq.Trace.SUPPRESS_TIMESTAMPS | pq.Trace.REGRESS_MODE) + ... + +""" + +# Copyright (C) 2022 The Psycopg Team + +import inspect +import logging +from typing import Any, Callable, Type, TypeVar, TYPE_CHECKING +from functools import wraps + +from . import PGconn +from .misc import connection_summary + +if TYPE_CHECKING: + from . import abc + +Func = TypeVar("Func", bound=Callable[..., Any]) + +logger = logging.getLogger("psycopg.debug") + + +class PGconnDebug: + """Wrapper for a PQconn logging all its access.""" + + _Self = TypeVar("_Self", bound="PGconnDebug") + _pgconn: "abc.PGconn" + + def __init__(self, pgconn: "abc.PGconn"): + super().__setattr__("_pgconn", pgconn) + + def __repr__(self) -> str: + cls = f"{self.__class__.__module__}.{self.__class__.__qualname__}" + info = connection_summary(self._pgconn) + return f"<{cls} {info} at 0x{id(self):x}>" + + def __getattr__(self, attr: str) -> Any: + value = getattr(self._pgconn, attr) + if callable(value): + return debugging(value) + else: + logger.info("PGconn.%s -> %s", attr, value) + return value + + def __setattr__(self, attr: str, value: Any) -> None: + setattr(self._pgconn, attr, value) + logger.info("PGconn.%s <- %s", attr, value) + + @classmethod + def connect(cls: Type[_Self], conninfo: bytes) -> _Self: + return cls(debugging(PGconn.connect)(conninfo)) + + @classmethod + def connect_start(cls: Type[_Self], conninfo: bytes) -> _Self: + return cls(debugging(PGconn.connect_start)(conninfo)) + + @classmethod + def ping(self, conninfo: bytes) -> int: + return debugging(PGconn.ping)(conninfo) + + 
+def debugging(f: Func) -> Func: + """Wrap a function in order to log its arguments and return value on call.""" + + @wraps(f) + def debugging_(*args: Any, **kwargs: Any) -> Any: + reprs = [] + for arg in args: + reprs.append(f"{arg!r}") + for k, v in kwargs.items(): + reprs.append(f"{k}={v!r}") + + logger.info("PGconn.%s(%s)", f.__name__, ", ".join(reprs)) + rv = f(*args, **kwargs) + # Display the return value only if the function is declared to return + # something else than None. + ra = inspect.signature(f).return_annotation + if ra is not None or rv is not None: + logger.info(" <- %r", rv) + return rv + + return debugging_ # type: ignore diff --git a/lib/python3.11/site-packages/psycopg/pq/_enums.py b/lib/python3.11/site-packages/psycopg/pq/_enums.py new file mode 100644 index 0000000..e0d4018 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/pq/_enums.py @@ -0,0 +1,249 @@ +""" +libpq enum definitions for psycopg +""" + +# Copyright (C) 2020 The Psycopg Team + +from enum import IntEnum, IntFlag, auto + + +class ConnStatus(IntEnum): + """ + Current status of the connection. + """ + + __module__ = "psycopg.pq" + + OK = 0 + """The connection is in a working state.""" + BAD = auto() + """The connection is closed.""" + + STARTED = auto() + MADE = auto() + AWAITING_RESPONSE = auto() + AUTH_OK = auto() + SETENV = auto() + SSL_STARTUP = auto() + NEEDED = auto() + CHECK_WRITABLE = auto() + CONSUME = auto() + GSS_STARTUP = auto() + CHECK_TARGET = auto() + CHECK_STANDBY = auto() + + +class PollingStatus(IntEnum): + """ + The status of the socket during a connection. + + If ``READING`` or ``WRITING`` you may select before polling again. 
+ """ + + __module__ = "psycopg.pq" + + FAILED = 0 + """Connection attempt failed.""" + READING = auto() + """Will have to wait before reading new data.""" + WRITING = auto() + """Will have to wait before writing new data.""" + OK = auto() + """Connection completed.""" + + ACTIVE = auto() + + +class ExecStatus(IntEnum): + """ + The status of a command. + """ + + __module__ = "psycopg.pq" + + EMPTY_QUERY = 0 + """The string sent to the server was empty.""" + + COMMAND_OK = auto() + """Successful completion of a command returning no data.""" + + TUPLES_OK = auto() + """ + Successful completion of a command returning data (such as a SELECT or SHOW). + """ + + COPY_OUT = auto() + """Copy Out (from server) data transfer started.""" + + COPY_IN = auto() + """Copy In (to server) data transfer started.""" + + BAD_RESPONSE = auto() + """The server's response was not understood.""" + + NONFATAL_ERROR = auto() + """A nonfatal error (a notice or warning) occurred.""" + + FATAL_ERROR = auto() + """A fatal error occurred.""" + + COPY_BOTH = auto() + """ + Copy In/Out (to and from server) data transfer started. + + This feature is currently used only for streaming replication, so this + status should not occur in ordinary applications. + """ + + SINGLE_TUPLE = auto() + """ + The PGresult contains a single result tuple from the current command. + + This status occurs only when single-row mode has been selected for the + query. + """ + + PIPELINE_SYNC = auto() + """ + The PGresult represents a synchronization point in pipeline mode, + requested by PQpipelineSync. + + This status occurs only when pipeline mode has been selected. + """ + + PIPELINE_ABORTED = auto() + """ + The PGresult represents a pipeline that has received an error from the server. + + PQgetResult must be called repeatedly, and each time it will return this + status code until the end of the current pipeline, at which point it will + return PGRES_PIPELINE_SYNC and normal processing can resume. 
+ """ + + +class TransactionStatus(IntEnum): + """ + The transaction status of a connection. + """ + + __module__ = "psycopg.pq" + + IDLE = 0 + """Connection ready, no transaction active.""" + + ACTIVE = auto() + """A command is in progress.""" + + INTRANS = auto() + """Connection idle in an open transaction.""" + + INERROR = auto() + """An error happened in the current transaction.""" + + UNKNOWN = auto() + """Unknown connection state, broken connection.""" + + +class Ping(IntEnum): + """Response from a ping attempt.""" + + __module__ = "psycopg.pq" + + OK = 0 + """ + The server is running and appears to be accepting connections. + """ + + REJECT = auto() + """ + The server is running but is in a state that disallows connections. + """ + + NO_RESPONSE = auto() + """ + The server could not be contacted. + """ + + NO_ATTEMPT = auto() + """ + No attempt was made to contact the server. + """ + + +class PipelineStatus(IntEnum): + """Pipeline mode status of the libpq connection.""" + + __module__ = "psycopg.pq" + + OFF = 0 + """ + The libpq connection is *not* in pipeline mode. + """ + ON = auto() + """ + The libpq connection is in pipeline mode. + """ + ABORTED = auto() + """ + The libpq connection is in pipeline mode and an error occurred while + processing the current pipeline. The aborted flag is cleared when + PQgetResult returns a result of type PGRES_PIPELINE_SYNC. + """ + + +class DiagnosticField(IntEnum): + """ + Fields in an error report. 
+ """ + + __module__ = "psycopg.pq" + + # from postgres_ext.h + SEVERITY = ord("S") + SEVERITY_NONLOCALIZED = ord("V") + SQLSTATE = ord("C") + MESSAGE_PRIMARY = ord("M") + MESSAGE_DETAIL = ord("D") + MESSAGE_HINT = ord("H") + STATEMENT_POSITION = ord("P") + INTERNAL_POSITION = ord("p") + INTERNAL_QUERY = ord("q") + CONTEXT = ord("W") + SCHEMA_NAME = ord("s") + TABLE_NAME = ord("t") + COLUMN_NAME = ord("c") + DATATYPE_NAME = ord("d") + CONSTRAINT_NAME = ord("n") + SOURCE_FILE = ord("F") + SOURCE_LINE = ord("L") + SOURCE_FUNCTION = ord("R") + + +class Format(IntEnum): + """ + Enum representing the format of a query argument or return value. + + These values are only the ones managed by the libpq. `~psycopg` may also + support automatically-chosen values: see `psycopg.adapt.PyFormat`. + """ + + __module__ = "psycopg.pq" + + TEXT = 0 + """Text parameter.""" + BINARY = 1 + """Binary parameter.""" + + +class Trace(IntFlag): + """ + Enum to control tracing of the client/server communication. + """ + + __module__ = "psycopg.pq" + + SUPPRESS_TIMESTAMPS = 1 + """Do not include timestamps in messages.""" + + REGRESS_MODE = 2 + """Redact some fields, e.g. 
OIDs, from messages.""" diff --git a/lib/python3.11/site-packages/psycopg/pq/_pq_ctypes.py b/lib/python3.11/site-packages/psycopg/pq/_pq_ctypes.py new file mode 100644 index 0000000..9ca1d12 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/pq/_pq_ctypes.py @@ -0,0 +1,804 @@ +""" +libpq access using ctypes +""" + +# Copyright (C) 2020 The Psycopg Team + +import sys +import ctypes +import ctypes.util +from ctypes import Structure, CFUNCTYPE, POINTER +from ctypes import c_char, c_char_p, c_int, c_size_t, c_ubyte, c_uint, c_void_p +from typing import List, Optional, Tuple + +from .misc import find_libpq_full_path +from ..errors import NotSupportedError + +libname = find_libpq_full_path() +if not libname: + raise ImportError("libpq library not found") + +pq = ctypes.cdll.LoadLibrary(libname) + + +class FILE(Structure): + pass + + +FILE_ptr = POINTER(FILE) + +if sys.platform == "linux": + libcname = ctypes.util.find_library("c") + assert libcname + libc = ctypes.cdll.LoadLibrary(libcname) + + fdopen = libc.fdopen + fdopen.argtypes = (c_int, c_char_p) + fdopen.restype = FILE_ptr + + +# Get the libpq version to define what functions are available. 
+ +PQlibVersion = pq.PQlibVersion +PQlibVersion.argtypes = [] +PQlibVersion.restype = c_int + +libpq_version = PQlibVersion() + + +# libpq data types + + +Oid = c_uint + + +class PGconn_struct(Structure): + _fields_: List[Tuple[str, type]] = [] + + +class PGresult_struct(Structure): + _fields_: List[Tuple[str, type]] = [] + + +class PQconninfoOption_struct(Structure): + _fields_ = [ + ("keyword", c_char_p), + ("envvar", c_char_p), + ("compiled", c_char_p), + ("val", c_char_p), + ("label", c_char_p), + ("dispchar", c_char_p), + ("dispsize", c_int), + ] + + +class PGnotify_struct(Structure): + _fields_ = [ + ("relname", c_char_p), + ("be_pid", c_int), + ("extra", c_char_p), + ] + + +class PGcancel_struct(Structure): + _fields_: List[Tuple[str, type]] = [] + + +class PGresAttDesc_struct(Structure): + _fields_ = [ + ("name", c_char_p), + ("tableid", Oid), + ("columnid", c_int), + ("format", c_int), + ("typid", Oid), + ("typlen", c_int), + ("atttypmod", c_int), + ] + + +PGconn_ptr = POINTER(PGconn_struct) +PGresult_ptr = POINTER(PGresult_struct) +PQconninfoOption_ptr = POINTER(PQconninfoOption_struct) +PGnotify_ptr = POINTER(PGnotify_struct) +PGcancel_ptr = POINTER(PGcancel_struct) +PGresAttDesc_ptr = POINTER(PGresAttDesc_struct) + + +# Function definitions as explained in PostgreSQL 12 documentation + +# 33.1. 
Database Connection Control Functions + +# PQconnectdbParams: doesn't seem useful, won't wrap for now + +PQconnectdb = pq.PQconnectdb +PQconnectdb.argtypes = [c_char_p] +PQconnectdb.restype = PGconn_ptr + +# PQsetdbLogin: not useful +# PQsetdb: not useful + +# PQconnectStartParams: not useful + +PQconnectStart = pq.PQconnectStart +PQconnectStart.argtypes = [c_char_p] +PQconnectStart.restype = PGconn_ptr + +PQconnectPoll = pq.PQconnectPoll +PQconnectPoll.argtypes = [PGconn_ptr] +PQconnectPoll.restype = c_int + +PQconndefaults = pq.PQconndefaults +PQconndefaults.argtypes = [] +PQconndefaults.restype = PQconninfoOption_ptr + +PQconninfoFree = pq.PQconninfoFree +PQconninfoFree.argtypes = [PQconninfoOption_ptr] +PQconninfoFree.restype = None + +PQconninfo = pq.PQconninfo +PQconninfo.argtypes = [PGconn_ptr] +PQconninfo.restype = PQconninfoOption_ptr + +PQconninfoParse = pq.PQconninfoParse +PQconninfoParse.argtypes = [c_char_p, POINTER(c_char_p)] +PQconninfoParse.restype = PQconninfoOption_ptr + +PQfinish = pq.PQfinish +PQfinish.argtypes = [PGconn_ptr] +PQfinish.restype = None + +PQreset = pq.PQreset +PQreset.argtypes = [PGconn_ptr] +PQreset.restype = None + +PQresetStart = pq.PQresetStart +PQresetStart.argtypes = [PGconn_ptr] +PQresetStart.restype = c_int + +PQresetPoll = pq.PQresetPoll +PQresetPoll.argtypes = [PGconn_ptr] +PQresetPoll.restype = c_int + +PQping = pq.PQping +PQping.argtypes = [c_char_p] +PQping.restype = c_int + + +# 33.2. 
Connection Status Functions + +PQdb = pq.PQdb +PQdb.argtypes = [PGconn_ptr] +PQdb.restype = c_char_p + +PQuser = pq.PQuser +PQuser.argtypes = [PGconn_ptr] +PQuser.restype = c_char_p + +PQpass = pq.PQpass +PQpass.argtypes = [PGconn_ptr] +PQpass.restype = c_char_p + +PQhost = pq.PQhost +PQhost.argtypes = [PGconn_ptr] +PQhost.restype = c_char_p + +_PQhostaddr = None + +if libpq_version >= 120000: + _PQhostaddr = pq.PQhostaddr + _PQhostaddr.argtypes = [PGconn_ptr] + _PQhostaddr.restype = c_char_p + + +def PQhostaddr(pgconn: PGconn_struct) -> bytes: + if not _PQhostaddr: + raise NotSupportedError( + "PQhostaddr requires libpq from PostgreSQL 12," + f" {libpq_version} available instead" + ) + + return _PQhostaddr(pgconn) + + +PQport = pq.PQport +PQport.argtypes = [PGconn_ptr] +PQport.restype = c_char_p + +PQtty = pq.PQtty +PQtty.argtypes = [PGconn_ptr] +PQtty.restype = c_char_p + +PQoptions = pq.PQoptions +PQoptions.argtypes = [PGconn_ptr] +PQoptions.restype = c_char_p + +PQstatus = pq.PQstatus +PQstatus.argtypes = [PGconn_ptr] +PQstatus.restype = c_int + +PQtransactionStatus = pq.PQtransactionStatus +PQtransactionStatus.argtypes = [PGconn_ptr] +PQtransactionStatus.restype = c_int + +PQparameterStatus = pq.PQparameterStatus +PQparameterStatus.argtypes = [PGconn_ptr, c_char_p] +PQparameterStatus.restype = c_char_p + +PQprotocolVersion = pq.PQprotocolVersion +PQprotocolVersion.argtypes = [PGconn_ptr] +PQprotocolVersion.restype = c_int + +PQserverVersion = pq.PQserverVersion +PQserverVersion.argtypes = [PGconn_ptr] +PQserverVersion.restype = c_int + +PQerrorMessage = pq.PQerrorMessage +PQerrorMessage.argtypes = [PGconn_ptr] +PQerrorMessage.restype = c_char_p + +PQsocket = pq.PQsocket +PQsocket.argtypes = [PGconn_ptr] +PQsocket.restype = c_int + +PQbackendPID = pq.PQbackendPID +PQbackendPID.argtypes = [PGconn_ptr] +PQbackendPID.restype = c_int + +PQconnectionNeedsPassword = pq.PQconnectionNeedsPassword +PQconnectionNeedsPassword.argtypes = [PGconn_ptr] 
+PQconnectionNeedsPassword.restype = c_int + +PQconnectionUsedPassword = pq.PQconnectionUsedPassword +PQconnectionUsedPassword.argtypes = [PGconn_ptr] +PQconnectionUsedPassword.restype = c_int + +PQsslInUse = pq.PQsslInUse +PQsslInUse.argtypes = [PGconn_ptr] +PQsslInUse.restype = c_int + +# TODO: PQsslAttribute, PQsslAttributeNames, PQsslStruct, PQgetssl + + +# 33.3. Command Execution Functions + +PQexec = pq.PQexec +PQexec.argtypes = [PGconn_ptr, c_char_p] +PQexec.restype = PGresult_ptr + +PQexecParams = pq.PQexecParams +PQexecParams.argtypes = [ + PGconn_ptr, + c_char_p, + c_int, + POINTER(Oid), + POINTER(c_char_p), + POINTER(c_int), + POINTER(c_int), + c_int, +] +PQexecParams.restype = PGresult_ptr + +PQprepare = pq.PQprepare +PQprepare.argtypes = [PGconn_ptr, c_char_p, c_char_p, c_int, POINTER(Oid)] +PQprepare.restype = PGresult_ptr + +PQexecPrepared = pq.PQexecPrepared +PQexecPrepared.argtypes = [ + PGconn_ptr, + c_char_p, + c_int, + POINTER(c_char_p), + POINTER(c_int), + POINTER(c_int), + c_int, +] +PQexecPrepared.restype = PGresult_ptr + +PQdescribePrepared = pq.PQdescribePrepared +PQdescribePrepared.argtypes = [PGconn_ptr, c_char_p] +PQdescribePrepared.restype = PGresult_ptr + +PQdescribePortal = pq.PQdescribePortal +PQdescribePortal.argtypes = [PGconn_ptr, c_char_p] +PQdescribePortal.restype = PGresult_ptr + +PQresultStatus = pq.PQresultStatus +PQresultStatus.argtypes = [PGresult_ptr] +PQresultStatus.restype = c_int + +# PQresStatus: not needed, we have pretty enums + +PQresultErrorMessage = pq.PQresultErrorMessage +PQresultErrorMessage.argtypes = [PGresult_ptr] +PQresultErrorMessage.restype = c_char_p + +# TODO: PQresultVerboseErrorMessage + +PQresultErrorField = pq.PQresultErrorField +PQresultErrorField.argtypes = [PGresult_ptr, c_int] +PQresultErrorField.restype = c_char_p + +PQclear = pq.PQclear +PQclear.argtypes = [PGresult_ptr] +PQclear.restype = None + + +# 33.3.2. 
Retrieving Query Result Information + +PQntuples = pq.PQntuples +PQntuples.argtypes = [PGresult_ptr] +PQntuples.restype = c_int + +PQnfields = pq.PQnfields +PQnfields.argtypes = [PGresult_ptr] +PQnfields.restype = c_int + +PQfname = pq.PQfname +PQfname.argtypes = [PGresult_ptr, c_int] +PQfname.restype = c_char_p + +# PQfnumber: useless and hard to use + +PQftable = pq.PQftable +PQftable.argtypes = [PGresult_ptr, c_int] +PQftable.restype = Oid + +PQftablecol = pq.PQftablecol +PQftablecol.argtypes = [PGresult_ptr, c_int] +PQftablecol.restype = c_int + +PQfformat = pq.PQfformat +PQfformat.argtypes = [PGresult_ptr, c_int] +PQfformat.restype = c_int + +PQftype = pq.PQftype +PQftype.argtypes = [PGresult_ptr, c_int] +PQftype.restype = Oid + +PQfmod = pq.PQfmod +PQfmod.argtypes = [PGresult_ptr, c_int] +PQfmod.restype = c_int + +PQfsize = pq.PQfsize +PQfsize.argtypes = [PGresult_ptr, c_int] +PQfsize.restype = c_int + +PQbinaryTuples = pq.PQbinaryTuples +PQbinaryTuples.argtypes = [PGresult_ptr] +PQbinaryTuples.restype = c_int + +PQgetvalue = pq.PQgetvalue +PQgetvalue.argtypes = [PGresult_ptr, c_int, c_int] +PQgetvalue.restype = POINTER(c_char) # not a null-terminated string + +PQgetisnull = pq.PQgetisnull +PQgetisnull.argtypes = [PGresult_ptr, c_int, c_int] +PQgetisnull.restype = c_int + +PQgetlength = pq.PQgetlength +PQgetlength.argtypes = [PGresult_ptr, c_int, c_int] +PQgetlength.restype = c_int + +PQnparams = pq.PQnparams +PQnparams.argtypes = [PGresult_ptr] +PQnparams.restype = c_int + +PQparamtype = pq.PQparamtype +PQparamtype.argtypes = [PGresult_ptr, c_int] +PQparamtype.restype = Oid + +# PQprint: pretty useless + +# 33.3.3. 
Retrieving Other Result Information + +PQcmdStatus = pq.PQcmdStatus +PQcmdStatus.argtypes = [PGresult_ptr] +PQcmdStatus.restype = c_char_p + +PQcmdTuples = pq.PQcmdTuples +PQcmdTuples.argtypes = [PGresult_ptr] +PQcmdTuples.restype = c_char_p + +PQoidValue = pq.PQoidValue +PQoidValue.argtypes = [PGresult_ptr] +PQoidValue.restype = Oid + + +# 33.3.4. Escaping Strings for Inclusion in SQL Commands + +PQescapeLiteral = pq.PQescapeLiteral +PQescapeLiteral.argtypes = [PGconn_ptr, c_char_p, c_size_t] +PQescapeLiteral.restype = POINTER(c_char) + +PQescapeIdentifier = pq.PQescapeIdentifier +PQescapeIdentifier.argtypes = [PGconn_ptr, c_char_p, c_size_t] +PQescapeIdentifier.restype = POINTER(c_char) + +PQescapeStringConn = pq.PQescapeStringConn +# TODO: raises "wrong type" error +# PQescapeStringConn.argtypes = [ +# PGconn_ptr, c_char_p, c_char_p, c_size_t, POINTER(c_int) +# ] +PQescapeStringConn.restype = c_size_t + +PQescapeString = pq.PQescapeString +# TODO: raises "wrong type" error +# PQescapeString.argtypes = [c_char_p, c_char_p, c_size_t] +PQescapeString.restype = c_size_t + +PQescapeByteaConn = pq.PQescapeByteaConn +PQescapeByteaConn.argtypes = [ + PGconn_ptr, + POINTER(c_char), # actually POINTER(c_ubyte) but this is easier + c_size_t, + POINTER(c_size_t), +] +PQescapeByteaConn.restype = POINTER(c_ubyte) + +PQescapeBytea = pq.PQescapeBytea +PQescapeBytea.argtypes = [ + POINTER(c_char), # actually POINTER(c_ubyte) but this is easier + c_size_t, + POINTER(c_size_t), +] +PQescapeBytea.restype = POINTER(c_ubyte) + + +PQunescapeBytea = pq.PQunescapeBytea +PQunescapeBytea.argtypes = [ + POINTER(c_char), # actually POINTER(c_ubyte) but this is easier + POINTER(c_size_t), +] +PQunescapeBytea.restype = POINTER(c_ubyte) + + +# 33.4. 
Asynchronous Command Processing + +PQsendQuery = pq.PQsendQuery +PQsendQuery.argtypes = [PGconn_ptr, c_char_p] +PQsendQuery.restype = c_int + +PQsendQueryParams = pq.PQsendQueryParams +PQsendQueryParams.argtypes = [ + PGconn_ptr, + c_char_p, + c_int, + POINTER(Oid), + POINTER(c_char_p), + POINTER(c_int), + POINTER(c_int), + c_int, +] +PQsendQueryParams.restype = c_int + +PQsendPrepare = pq.PQsendPrepare +PQsendPrepare.argtypes = [PGconn_ptr, c_char_p, c_char_p, c_int, POINTER(Oid)] +PQsendPrepare.restype = c_int + +PQsendQueryPrepared = pq.PQsendQueryPrepared +PQsendQueryPrepared.argtypes = [ + PGconn_ptr, + c_char_p, + c_int, + POINTER(c_char_p), + POINTER(c_int), + POINTER(c_int), + c_int, +] +PQsendQueryPrepared.restype = c_int + +PQsendDescribePrepared = pq.PQsendDescribePrepared +PQsendDescribePrepared.argtypes = [PGconn_ptr, c_char_p] +PQsendDescribePrepared.restype = c_int + +PQsendDescribePortal = pq.PQsendDescribePortal +PQsendDescribePortal.argtypes = [PGconn_ptr, c_char_p] +PQsendDescribePortal.restype = c_int + +PQgetResult = pq.PQgetResult +PQgetResult.argtypes = [PGconn_ptr] +PQgetResult.restype = PGresult_ptr + +PQconsumeInput = pq.PQconsumeInput +PQconsumeInput.argtypes = [PGconn_ptr] +PQconsumeInput.restype = c_int + +PQisBusy = pq.PQisBusy +PQisBusy.argtypes = [PGconn_ptr] +PQisBusy.restype = c_int + +PQsetnonblocking = pq.PQsetnonblocking +PQsetnonblocking.argtypes = [PGconn_ptr, c_int] +PQsetnonblocking.restype = c_int + +PQisnonblocking = pq.PQisnonblocking +PQisnonblocking.argtypes = [PGconn_ptr] +PQisnonblocking.restype = c_int + +PQflush = pq.PQflush +PQflush.argtypes = [PGconn_ptr] +PQflush.restype = c_int + + +# 33.5. Retrieving Query Results Row-by-Row +PQsetSingleRowMode = pq.PQsetSingleRowMode +PQsetSingleRowMode.argtypes = [PGconn_ptr] +PQsetSingleRowMode.restype = c_int + + +# 33.6. 
Canceling Queries in Progress + +PQgetCancel = pq.PQgetCancel +PQgetCancel.argtypes = [PGconn_ptr] +PQgetCancel.restype = PGcancel_ptr + +PQfreeCancel = pq.PQfreeCancel +PQfreeCancel.argtypes = [PGcancel_ptr] +PQfreeCancel.restype = None + +PQcancel = pq.PQcancel +# TODO: raises "wrong type" error +# PQcancel.argtypes = [PGcancel_ptr, POINTER(c_char), c_int] +PQcancel.restype = c_int + + +# 33.8. Asynchronous Notification + +PQnotifies = pq.PQnotifies +PQnotifies.argtypes = [PGconn_ptr] +PQnotifies.restype = PGnotify_ptr + + +# 33.9. Functions Associated with the COPY Command + +PQputCopyData = pq.PQputCopyData +PQputCopyData.argtypes = [PGconn_ptr, c_char_p, c_int] +PQputCopyData.restype = c_int + +PQputCopyEnd = pq.PQputCopyEnd +PQputCopyEnd.argtypes = [PGconn_ptr, c_char_p] +PQputCopyEnd.restype = c_int + +PQgetCopyData = pq.PQgetCopyData +PQgetCopyData.argtypes = [PGconn_ptr, POINTER(c_char_p), c_int] +PQgetCopyData.restype = c_int + + +# 33.10. Control Functions + +PQtrace = pq.PQtrace +PQtrace.argtypes = [PGconn_ptr, FILE_ptr] +PQtrace.restype = None + +_PQsetTraceFlags = None + +if libpq_version >= 140000: + _PQsetTraceFlags = pq.PQsetTraceFlags + _PQsetTraceFlags.argtypes = [PGconn_ptr, c_int] + _PQsetTraceFlags.restype = None + + +def PQsetTraceFlags(pgconn: PGconn_struct, flags: int) -> None: + if not _PQsetTraceFlags: + raise NotSupportedError( + "PQsetTraceFlags requires libpq from PostgreSQL 14," + f" {libpq_version} available instead" + ) + + _PQsetTraceFlags(pgconn, flags) + + +PQuntrace = pq.PQuntrace +PQuntrace.argtypes = [PGconn_ptr] +PQuntrace.restype = None + +# 33.11. 
Miscellaneous Functions + +PQfreemem = pq.PQfreemem +PQfreemem.argtypes = [c_void_p] +PQfreemem.restype = None + +if libpq_version >= 100000: + _PQencryptPasswordConn = pq.PQencryptPasswordConn + _PQencryptPasswordConn.argtypes = [ + PGconn_ptr, + c_char_p, + c_char_p, + c_char_p, + ] + _PQencryptPasswordConn.restype = POINTER(c_char) + + +def PQencryptPasswordConn( + pgconn: PGconn_struct, passwd: bytes, user: bytes, algorithm: bytes +) -> Optional[bytes]: + if not _PQencryptPasswordConn: + raise NotSupportedError( + "PQencryptPasswordConn requires libpq from PostgreSQL 10," + f" {libpq_version} available instead" + ) + + return _PQencryptPasswordConn(pgconn, passwd, user, algorithm) + + +PQmakeEmptyPGresult = pq.PQmakeEmptyPGresult +PQmakeEmptyPGresult.argtypes = [PGconn_ptr, c_int] +PQmakeEmptyPGresult.restype = PGresult_ptr + +PQsetResultAttrs = pq.PQsetResultAttrs +PQsetResultAttrs.argtypes = [PGresult_ptr, c_int, PGresAttDesc_ptr] +PQsetResultAttrs.restype = c_int + + +# 33.12. Notice Processing + +PQnoticeReceiver = CFUNCTYPE(None, c_void_p, PGresult_ptr) + +PQsetNoticeReceiver = pq.PQsetNoticeReceiver +PQsetNoticeReceiver.argtypes = [PGconn_ptr, PQnoticeReceiver, c_void_p] +PQsetNoticeReceiver.restype = PQnoticeReceiver + +# 34.5 Pipeline Mode + +_PQpipelineStatus = None +_PQenterPipelineMode = None +_PQexitPipelineMode = None +_PQpipelineSync = None +_PQsendFlushRequest = None + +if libpq_version >= 140000: + _PQpipelineStatus = pq.PQpipelineStatus + _PQpipelineStatus.argtypes = [PGconn_ptr] + _PQpipelineStatus.restype = c_int + + _PQenterPipelineMode = pq.PQenterPipelineMode + _PQenterPipelineMode.argtypes = [PGconn_ptr] + _PQenterPipelineMode.restype = c_int + + _PQexitPipelineMode = pq.PQexitPipelineMode + _PQexitPipelineMode.argtypes = [PGconn_ptr] + _PQexitPipelineMode.restype = c_int + + _PQpipelineSync = pq.PQpipelineSync + _PQpipelineSync.argtypes = [PGconn_ptr] + _PQpipelineSync.restype = c_int + + _PQsendFlushRequest = pq.PQsendFlushRequest + 
_PQsendFlushRequest.argtypes = [PGconn_ptr] + _PQsendFlushRequest.restype = c_int + + +def PQpipelineStatus(pgconn: PGconn_struct) -> int: + if not _PQpipelineStatus: + raise NotSupportedError( + "PQpipelineStatus requires libpq from PostgreSQL 14," + f" {libpq_version} available instead" + ) + return _PQpipelineStatus(pgconn) + + +def PQenterPipelineMode(pgconn: PGconn_struct) -> int: + if not _PQenterPipelineMode: + raise NotSupportedError( + "PQenterPipelineMode requires libpq from PostgreSQL 14," + f" {libpq_version} available instead" + ) + return _PQenterPipelineMode(pgconn) + + +def PQexitPipelineMode(pgconn: PGconn_struct) -> int: + if not _PQexitPipelineMode: + raise NotSupportedError( + "PQexitPipelineMode requires libpq from PostgreSQL 14," + f" {libpq_version} available instead" + ) + return _PQexitPipelineMode(pgconn) + + +def PQpipelineSync(pgconn: PGconn_struct) -> int: + if not _PQpipelineSync: + raise NotSupportedError( + "PQpipelineSync requires libpq from PostgreSQL 14," + f" {libpq_version} available instead" + ) + return _PQpipelineSync(pgconn) + + +def PQsendFlushRequest(pgconn: PGconn_struct) -> int: + if not _PQsendFlushRequest: + raise NotSupportedError( + "PQsendFlushRequest requires libpq from PostgreSQL 14," + f" {libpq_version} available instead" + ) + return _PQsendFlushRequest(pgconn) + + +# 33.18. 
SSL Support + +PQinitOpenSSL = pq.PQinitOpenSSL +PQinitOpenSSL.argtypes = [c_int, c_int] +PQinitOpenSSL.restype = None + + +def generate_stub() -> None: + import re + from ctypes import _CFuncPtr # type: ignore + + def type2str(fname, narg, t): + if t is None: + return "None" + elif t is c_void_p: + return "Any" + elif t is c_int or t is c_uint or t is c_size_t: + return "int" + elif t is c_char_p or t.__name__ == "LP_c_char": + if narg is not None: + return "bytes" + else: + return "Optional[bytes]" + + elif t.__name__ in ( + "LP_PGconn_struct", + "LP_PGresult_struct", + "LP_PGcancel_struct", + ): + if narg is not None: + return f"Optional[{t.__name__[3:]}]" + else: + return t.__name__[3:] + + elif t.__name__ in ("LP_PQconninfoOption_struct",): + return f"Sequence[{t.__name__[3:]}]" + + elif t.__name__ in ( + "LP_c_ubyte", + "LP_c_char_p", + "LP_c_int", + "LP_c_uint", + "LP_c_ulong", + "LP_FILE", + ): + return f"_Pointer[{t.__name__[3:]}]" + + else: + assert False, f"can't deal with {t} in {fname}" + + fn = __file__ + "i" + with open(fn) as f: + lines = f.read().splitlines() + + istart, iend = ( + i + for i, line in enumerate(lines) + if re.match(r"\s*#\s*autogenerated:\s+(start|end)", line) + ) + + known = { + line[4:].split("(", 1)[0] for line in lines[:istart] if line.startswith("def ") + } + + signatures = [] + + for name, obj in globals().items(): + if name in known: + continue + if not isinstance(obj, _CFuncPtr): + continue + + params = [] + for i, t in enumerate(obj.argtypes): + params.append(f"arg{i + 1}: {type2str(name, i, t)}") + + resname = type2str(name, None, obj.restype) + + signatures.append(f"def {name}({', '.join(params)}) -> {resname}: ...") + + lines[istart + 1 : iend] = signatures + + with open(fn, "w") as f: + f.write("\n".join(lines)) + f.write("\n") + + +if __name__ == "__main__": + generate_stub() diff --git a/lib/python3.11/site-packages/psycopg/pq/abc.py b/lib/python3.11/site-packages/psycopg/pq/abc.py new file mode 100644 index 
0000000..971d00e --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/pq/abc.py @@ -0,0 +1,384 @@ +""" +Protocol objects to represent objects exposed by different pq implementations. +""" + +# Copyright (C) 2020 The Psycopg Team + +from typing import Any, Callable, List, Optional, Sequence, Tuple +from typing import Union, TYPE_CHECKING +from typing_extensions import TypeAlias + +from ._enums import Format, Trace +from .._compat import Protocol + +if TYPE_CHECKING: + from .misc import PGnotify, ConninfoOption, PGresAttDesc + +# An object implementing the buffer protocol (ish) +Buffer: TypeAlias = Union[bytes, bytearray, memoryview] + + +class PGconn(Protocol): + notice_handler: Optional[Callable[["PGresult"], None]] + notify_handler: Optional[Callable[["PGnotify"], None]] + + @classmethod + def connect(cls, conninfo: bytes) -> "PGconn": + ... + + @classmethod + def connect_start(cls, conninfo: bytes) -> "PGconn": + ... + + def connect_poll(self) -> int: + ... + + def finish(self) -> None: + ... + + @property + def info(self) -> List["ConninfoOption"]: + ... + + def reset(self) -> None: + ... + + def reset_start(self) -> None: + ... + + def reset_poll(self) -> int: + ... + + @classmethod + def ping(self, conninfo: bytes) -> int: + ... + + @property + def db(self) -> bytes: + ... + + @property + def user(self) -> bytes: + ... + + @property + def password(self) -> bytes: + ... + + @property + def host(self) -> bytes: + ... + + @property + def hostaddr(self) -> bytes: + ... + + @property + def port(self) -> bytes: + ... + + @property + def tty(self) -> bytes: + ... + + @property + def options(self) -> bytes: + ... + + @property + def status(self) -> int: + ... + + @property + def transaction_status(self) -> int: + ... + + def parameter_status(self, name: bytes) -> Optional[bytes]: + ... + + @property + def error_message(self) -> bytes: + ... + + @property + def server_version(self) -> int: + ... + + @property + def socket(self) -> int: + ... 
+ + @property + def backend_pid(self) -> int: + ... + + @property + def needs_password(self) -> bool: + ... + + @property + def used_password(self) -> bool: + ... + + @property + def ssl_in_use(self) -> bool: + ... + + def exec_(self, command: bytes) -> "PGresult": + ... + + def send_query(self, command: bytes) -> None: + ... + + def exec_params( + self, + command: bytes, + param_values: Optional[Sequence[Optional[Buffer]]], + param_types: Optional[Sequence[int]] = None, + param_formats: Optional[Sequence[int]] = None, + result_format: int = Format.TEXT, + ) -> "PGresult": + ... + + def send_query_params( + self, + command: bytes, + param_values: Optional[Sequence[Optional[Buffer]]], + param_types: Optional[Sequence[int]] = None, + param_formats: Optional[Sequence[int]] = None, + result_format: int = Format.TEXT, + ) -> None: + ... + + def send_prepare( + self, + name: bytes, + command: bytes, + param_types: Optional[Sequence[int]] = None, + ) -> None: + ... + + def send_query_prepared( + self, + name: bytes, + param_values: Optional[Sequence[Optional[Buffer]]], + param_formats: Optional[Sequence[int]] = None, + result_format: int = Format.TEXT, + ) -> None: + ... + + def prepare( + self, + name: bytes, + command: bytes, + param_types: Optional[Sequence[int]] = None, + ) -> "PGresult": + ... + + def exec_prepared( + self, + name: bytes, + param_values: Optional[Sequence[Buffer]], + param_formats: Optional[Sequence[int]] = None, + result_format: int = 0, + ) -> "PGresult": + ... + + def describe_prepared(self, name: bytes) -> "PGresult": + ... + + def send_describe_prepared(self, name: bytes) -> None: + ... + + def describe_portal(self, name: bytes) -> "PGresult": + ... + + def send_describe_portal(self, name: bytes) -> None: + ... + + def get_result(self) -> Optional["PGresult"]: + ... + + def consume_input(self) -> None: + ... + + def is_busy(self) -> int: + ... + + @property + def nonblocking(self) -> int: + ... 
+ + @nonblocking.setter + def nonblocking(self, arg: int) -> None: + ... + + def flush(self) -> int: + ... + + def set_single_row_mode(self) -> None: + ... + + def get_cancel(self) -> "PGcancel": + ... + + def notifies(self) -> Optional["PGnotify"]: + ... + + def put_copy_data(self, buffer: Buffer) -> int: + ... + + def put_copy_end(self, error: Optional[bytes] = None) -> int: + ... + + def get_copy_data(self, async_: int) -> Tuple[int, memoryview]: + ... + + def trace(self, fileno: int) -> None: + ... + + def set_trace_flags(self, flags: Trace) -> None: + ... + + def untrace(self) -> None: + ... + + def encrypt_password( + self, passwd: bytes, user: bytes, algorithm: Optional[bytes] = None + ) -> bytes: + ... + + def make_empty_result(self, exec_status: int) -> "PGresult": + ... + + @property + def pipeline_status(self) -> int: + ... + + def enter_pipeline_mode(self) -> None: + ... + + def exit_pipeline_mode(self) -> None: + ... + + def pipeline_sync(self) -> None: + ... + + def send_flush_request(self) -> None: + ... + + +class PGresult(Protocol): + def clear(self) -> None: + ... + + @property + def status(self) -> int: + ... + + @property + def error_message(self) -> bytes: + ... + + def error_field(self, fieldcode: int) -> Optional[bytes]: + ... + + @property + def ntuples(self) -> int: + ... + + @property + def nfields(self) -> int: + ... + + def fname(self, column_number: int) -> Optional[bytes]: + ... + + def ftable(self, column_number: int) -> int: + ... + + def ftablecol(self, column_number: int) -> int: + ... + + def fformat(self, column_number: int) -> int: + ... + + def ftype(self, column_number: int) -> int: + ... + + def fmod(self, column_number: int) -> int: + ... + + def fsize(self, column_number: int) -> int: + ... + + @property + def binary_tuples(self) -> int: + ... + + def get_value(self, row_number: int, column_number: int) -> Optional[bytes]: + ... + + @property + def nparams(self) -> int: + ... 
+ + def param_type(self, param_number: int) -> int: + ... + + @property + def command_status(self) -> Optional[bytes]: + ... + + @property + def command_tuples(self) -> Optional[int]: + ... + + @property + def oid_value(self) -> int: + ... + + def set_attributes(self, descriptions: List["PGresAttDesc"]) -> None: + ... + + +class PGcancel(Protocol): + def free(self) -> None: + ... + + def cancel(self) -> None: + ... + + +class Conninfo(Protocol): + @classmethod + def get_defaults(cls) -> List["ConninfoOption"]: + ... + + @classmethod + def parse(cls, conninfo: bytes) -> List["ConninfoOption"]: + ... + + @classmethod + def _options_from_array(cls, opts: Sequence[Any]) -> List["ConninfoOption"]: + ... + + +class Escaping(Protocol): + def __init__(self, conn: Optional[PGconn] = None): + ... + + def escape_literal(self, data: Buffer) -> bytes: + ... + + def escape_identifier(self, data: Buffer) -> bytes: + ... + + def escape_string(self, data: Buffer) -> bytes: + ... + + def escape_bytea(self, data: Buffer) -> bytes: + ... + + def unescape_bytea(self, data: Buffer) -> bytes: + ... diff --git a/lib/python3.11/site-packages/psycopg/pq/misc.py b/lib/python3.11/site-packages/psycopg/pq/misc.py new file mode 100644 index 0000000..3a43133 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/pq/misc.py @@ -0,0 +1,146 @@ +""" +Various functionalities to make easier to work with the libpq. 
+""" + +# Copyright (C) 2020 The Psycopg Team + +import os +import sys +import logging +import ctypes.util +from typing import cast, NamedTuple, Optional, Union + +from .abc import PGconn, PGresult +from ._enums import ConnStatus, TransactionStatus, PipelineStatus +from .._compat import cache +from .._encodings import pgconn_encoding + +logger = logging.getLogger("psycopg.pq") + +OK = ConnStatus.OK + + +class PGnotify(NamedTuple): + relname: bytes + be_pid: int + extra: bytes + + +class ConninfoOption(NamedTuple): + keyword: bytes + envvar: Optional[bytes] + compiled: Optional[bytes] + val: Optional[bytes] + label: bytes + dispchar: bytes + dispsize: int + + +class PGresAttDesc(NamedTuple): + name: bytes + tableid: int + columnid: int + format: int + typid: int + typlen: int + atttypmod: int + + +@cache +def find_libpq_full_path() -> Optional[str]: + if sys.platform == "win32": + libname = ctypes.util.find_library("libpq.dll") + + elif sys.platform == "darwin": + libname = ctypes.util.find_library("libpq.dylib") + # (hopefully) temporary hack: libpq not in a standard place + # https://github.com/orgs/Homebrew/discussions/3595 + # If pg_config is available and agrees, let's use its indications. + if not libname: + try: + import subprocess as sp + + libdir = sp.check_output(["pg_config", "--libdir"]).strip().decode() + libname = os.path.join(libdir, "libpq.dylib") + if not os.path.exists(libname): + libname = None + except Exception as ex: + logger.debug("couldn't use pg_config to find libpq: %s", ex) + + else: + libname = ctypes.util.find_library("pq") + + return libname + + +def error_message(obj: Union[PGconn, PGresult], encoding: str = "utf8") -> str: + """ + Return an error message from a `PGconn` or `PGresult`. + + The return value is a `!str` (unlike pq data which is usually `!bytes`): + use the connection encoding if available, otherwise the `!encoding` + parameter as a fallback for decoding. Don't raise exceptions on decoding + errors. 
+ + """ + bmsg: bytes + + if hasattr(obj, "error_field"): + # obj is a PGresult + obj = cast(PGresult, obj) + bmsg = obj.error_message + + # strip severity and whitespaces + if bmsg: + bmsg = bmsg.split(b":", 1)[-1].strip() + + elif hasattr(obj, "error_message"): + # obj is a PGconn + if obj.status == OK: + encoding = pgconn_encoding(obj) + bmsg = obj.error_message + + # strip severity and whitespaces + if bmsg: + bmsg = bmsg.split(b":", 1)[-1].strip() + + else: + raise TypeError(f"PGconn or PGresult expected, got {type(obj).__name__}") + + if bmsg: + msg = bmsg.decode(encoding, "replace") + else: + msg = "no details available" + + return msg + + +def connection_summary(pgconn: PGconn) -> str: + """ + Return summary information on a connection. + + Useful for __repr__ + """ + parts = [] + if pgconn.status == OK: + # Put together the [STATUS] + status = TransactionStatus(pgconn.transaction_status).name + if pgconn.pipeline_status: + status += f", pipeline={PipelineStatus(pgconn.pipeline_status).name}" + + # Put together the (CONNECTION) + if not pgconn.host.startswith(b"/"): + parts.append(("host", pgconn.host.decode())) + if pgconn.port != b"5432": + parts.append(("port", pgconn.port.decode())) + if pgconn.user != pgconn.db: + parts.append(("user", pgconn.user.decode())) + parts.append(("database", pgconn.db.decode())) + + else: + status = ConnStatus(pgconn.status).name + + sparts = " ".join("%s=%s" % part for part in parts) + if sparts: + sparts = f" ({sparts})" + return f"[{status}]{sparts}" diff --git a/lib/python3.11/site-packages/psycopg/pq/pq_ctypes.py b/lib/python3.11/site-packages/psycopg/pq/pq_ctypes.py new file mode 100644 index 0000000..204e384 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/pq/pq_ctypes.py @@ -0,0 +1,1088 @@ +""" +libpq Python wrapper using ctypes bindings. + +Clients shouldn't use this module directly, unless for testing: they should use +the `pq` module instead, which is in charge of choosing the best +implementation. 
+""" + +# Copyright (C) 2020 The Psycopg Team + +import sys +import logging +from os import getpid +from weakref import ref + +from ctypes import Array, POINTER, cast, string_at, create_string_buffer, byref +from ctypes import addressof, c_char_p, c_int, c_size_t, c_ulong, c_void_p, py_object +from typing import Any, Callable, List, Optional, Sequence, Tuple +from typing import cast as t_cast, TYPE_CHECKING + +from .. import errors as e +from . import _pq_ctypes as impl +from .misc import PGnotify, ConninfoOption, PGresAttDesc +from .misc import error_message, connection_summary +from ._enums import Format, ExecStatus, Trace + +# Imported locally to call them from __del__ methods +from ._pq_ctypes import PQclear, PQfinish, PQfreeCancel, PQstatus + +if TYPE_CHECKING: + from . import abc + +__impl__ = "python" + +logger = logging.getLogger("psycopg") + + +def version() -> int: + """Return the version number of the libpq currently loaded. + + The number is in the same format of `~psycopg.ConnectionInfo.server_version`. + + Certain features might not be available if the libpq library used is too old. + """ + return impl.PQlibVersion() + + +@impl.PQnoticeReceiver # type: ignore +def notice_receiver(arg: c_void_p, result_ptr: impl.PGresult_struct) -> None: + pgconn = cast(arg, POINTER(py_object)).contents.value() + if not (pgconn and pgconn.notice_handler): + return + + res = PGresult(result_ptr) + try: + pgconn.notice_handler(res) + except Exception as exc: + logger.exception("error in notice receiver: %s", exc) + finally: + res._pgresult_ptr = None # avoid destroying the pgresult_ptr + + +class PGconn: + """ + Python representation of a libpq connection. 
+ """ + + __slots__ = ( + "_pgconn_ptr", + "notice_handler", + "notify_handler", + "_self_ptr", + "_procpid", + "__weakref__", + ) + + def __init__(self, pgconn_ptr: impl.PGconn_struct): + self._pgconn_ptr: Optional[impl.PGconn_struct] = pgconn_ptr + self.notice_handler: Optional[Callable[["abc.PGresult"], None]] = None + self.notify_handler: Optional[Callable[[PGnotify], None]] = None + + # Keep alive for the lifetime of PGconn + self._self_ptr = py_object(ref(self)) + impl.PQsetNoticeReceiver(pgconn_ptr, notice_receiver, byref(self._self_ptr)) + + self._procpid = getpid() + + def __del__(self) -> None: + # Close the connection only if it was created in this process, + # not if this object is being GC'd after fork. + if getpid() == self._procpid: + self.finish() + + def __repr__(self) -> str: + cls = f"{self.__class__.__module__}.{self.__class__.__qualname__}" + info = connection_summary(self) + return f"<{cls} {info} at 0x{id(self):x}>" + + @classmethod + def connect(cls, conninfo: bytes) -> "PGconn": + if not isinstance(conninfo, bytes): + raise TypeError(f"bytes expected, got {type(conninfo)} instead") + + pgconn_ptr = impl.PQconnectdb(conninfo) + if not pgconn_ptr: + raise MemoryError("couldn't allocate PGconn") + return cls(pgconn_ptr) + + @classmethod + def connect_start(cls, conninfo: bytes) -> "PGconn": + if not isinstance(conninfo, bytes): + raise TypeError(f"bytes expected, got {type(conninfo)} instead") + + pgconn_ptr = impl.PQconnectStart(conninfo) + if not pgconn_ptr: + raise MemoryError("couldn't allocate PGconn") + return cls(pgconn_ptr) + + def connect_poll(self) -> int: + return self._call_int(impl.PQconnectPoll) + + def finish(self) -> None: + self._pgconn_ptr, p = None, self._pgconn_ptr + if p: + PQfinish(p) + + @property + def pgconn_ptr(self) -> Optional[int]: + """The pointer to the underlying `!PGconn` structure, as integer. + + `!None` if the connection is closed. 
+ + The value can be used to pass the structure to libpq functions which + psycopg doesn't (currently) wrap, either in C or in Python using FFI + libraries such as `ctypes`. + """ + if self._pgconn_ptr is None: + return None + + return addressof(self._pgconn_ptr.contents) # type: ignore[attr-defined] + + @property + def info(self) -> List["ConninfoOption"]: + self._ensure_pgconn() + opts = impl.PQconninfo(self._pgconn_ptr) + if not opts: + raise MemoryError("couldn't allocate connection info") + try: + return Conninfo._options_from_array(opts) + finally: + impl.PQconninfoFree(opts) + + def reset(self) -> None: + self._ensure_pgconn() + impl.PQreset(self._pgconn_ptr) + + def reset_start(self) -> None: + if not impl.PQresetStart(self._pgconn_ptr): + raise e.OperationalError("couldn't reset connection") + + def reset_poll(self) -> int: + return self._call_int(impl.PQresetPoll) + + @classmethod + def ping(self, conninfo: bytes) -> int: + if not isinstance(conninfo, bytes): + raise TypeError(f"bytes expected, got {type(conninfo)} instead") + + return impl.PQping(conninfo) + + @property + def db(self) -> bytes: + return self._call_bytes(impl.PQdb) + + @property + def user(self) -> bytes: + return self._call_bytes(impl.PQuser) + + @property + def password(self) -> bytes: + return self._call_bytes(impl.PQpass) + + @property + def host(self) -> bytes: + return self._call_bytes(impl.PQhost) + + @property + def hostaddr(self) -> bytes: + return self._call_bytes(impl.PQhostaddr) + + @property + def port(self) -> bytes: + return self._call_bytes(impl.PQport) + + @property + def tty(self) -> bytes: + return self._call_bytes(impl.PQtty) + + @property + def options(self) -> bytes: + return self._call_bytes(impl.PQoptions) + + @property + def status(self) -> int: + return PQstatus(self._pgconn_ptr) + + @property + def transaction_status(self) -> int: + return impl.PQtransactionStatus(self._pgconn_ptr) + + def parameter_status(self, name: bytes) -> Optional[bytes]: + 
self._ensure_pgconn() + return impl.PQparameterStatus(self._pgconn_ptr, name) + + @property + def error_message(self) -> bytes: + return impl.PQerrorMessage(self._pgconn_ptr) + + @property + def protocol_version(self) -> int: + return self._call_int(impl.PQprotocolVersion) + + @property + def server_version(self) -> int: + return self._call_int(impl.PQserverVersion) + + @property + def socket(self) -> int: + rv = self._call_int(impl.PQsocket) + if rv == -1: + raise e.OperationalError("the connection is lost") + return rv + + @property + def backend_pid(self) -> int: + return self._call_int(impl.PQbackendPID) + + @property + def needs_password(self) -> bool: + """True if the connection authentication method required a password, + but none was available. + + See :pq:`PQconnectionNeedsPassword` for details. + """ + return bool(impl.PQconnectionNeedsPassword(self._pgconn_ptr)) + + @property + def used_password(self) -> bool: + """True if the connection authentication method used a password. + + See :pq:`PQconnectionUsedPassword` for details. 
+ """ + return bool(impl.PQconnectionUsedPassword(self._pgconn_ptr)) + + @property + def ssl_in_use(self) -> bool: + return self._call_bool(impl.PQsslInUse) + + def exec_(self, command: bytes) -> "PGresult": + if not isinstance(command, bytes): + raise TypeError(f"bytes expected, got {type(command)} instead") + self._ensure_pgconn() + rv = impl.PQexec(self._pgconn_ptr, command) + if not rv: + raise e.OperationalError(f"executing query failed: {error_message(self)}") + return PGresult(rv) + + def send_query(self, command: bytes) -> None: + if not isinstance(command, bytes): + raise TypeError(f"bytes expected, got {type(command)} instead") + self._ensure_pgconn() + if not impl.PQsendQuery(self._pgconn_ptr, command): + raise e.OperationalError(f"sending query failed: {error_message(self)}") + + def exec_params( + self, + command: bytes, + param_values: Optional[Sequence[Optional["abc.Buffer"]]], + param_types: Optional[Sequence[int]] = None, + param_formats: Optional[Sequence[int]] = None, + result_format: int = Format.TEXT, + ) -> "PGresult": + args = self._query_params_args( + command, param_values, param_types, param_formats, result_format + ) + self._ensure_pgconn() + rv = impl.PQexecParams(*args) + if not rv: + raise e.OperationalError(f"executing query failed: {error_message(self)}") + return PGresult(rv) + + def send_query_params( + self, + command: bytes, + param_values: Optional[Sequence[Optional["abc.Buffer"]]], + param_types: Optional[Sequence[int]] = None, + param_formats: Optional[Sequence[int]] = None, + result_format: int = Format.TEXT, + ) -> None: + args = self._query_params_args( + command, param_values, param_types, param_formats, result_format + ) + self._ensure_pgconn() + if not impl.PQsendQueryParams(*args): + raise e.OperationalError( + f"sending query and params failed: {error_message(self)}" + ) + + def send_prepare( + self, + name: bytes, + command: bytes, + param_types: Optional[Sequence[int]] = None, + ) -> None: + atypes: 
Optional[Array[impl.Oid]] + if not param_types: + nparams = 0 + atypes = None + else: + nparams = len(param_types) + atypes = (impl.Oid * nparams)(*param_types) + + self._ensure_pgconn() + if not impl.PQsendPrepare(self._pgconn_ptr, name, command, nparams, atypes): + raise e.OperationalError( + f"sending query and params failed: {error_message(self)}" + ) + + def send_query_prepared( + self, + name: bytes, + param_values: Optional[Sequence[Optional["abc.Buffer"]]], + param_formats: Optional[Sequence[int]] = None, + result_format: int = Format.TEXT, + ) -> None: + # repurpose this function with a cheeky replacement of query with name, + # drop the param_types from the result + args = self._query_params_args( + name, param_values, None, param_formats, result_format + ) + args = args[:3] + args[4:] + + self._ensure_pgconn() + if not impl.PQsendQueryPrepared(*args): + raise e.OperationalError( + f"sending prepared query failed: {error_message(self)}" + ) + + def _query_params_args( + self, + command: bytes, + param_values: Optional[Sequence[Optional["abc.Buffer"]]], + param_types: Optional[Sequence[int]] = None, + param_formats: Optional[Sequence[int]] = None, + result_format: int = Format.TEXT, + ) -> Any: + if not isinstance(command, bytes): + raise TypeError(f"bytes expected, got {type(command)} instead") + + aparams: Optional[Array[c_char_p]] + alenghts: Optional[Array[c_int]] + if param_values: + nparams = len(param_values) + aparams = (c_char_p * nparams)( + *( + # convert bytearray/memoryview to bytes + b if b is None or isinstance(b, bytes) else bytes(b) + for b in param_values + ) + ) + alenghts = (c_int * nparams)(*(len(p) if p else 0 for p in param_values)) + else: + nparams = 0 + aparams = alenghts = None + + atypes: Optional[Array[impl.Oid]] + if not param_types: + atypes = None + else: + if len(param_types) != nparams: + raise ValueError( + "got %d param_values but %d param_types" + % (nparams, len(param_types)) + ) + atypes = (impl.Oid * 
nparams)(*param_types) + + if not param_formats: + aformats = None + else: + if len(param_formats) != nparams: + raise ValueError( + "got %d param_values but %d param_formats" + % (nparams, len(param_formats)) + ) + aformats = (c_int * nparams)(*param_formats) + + return ( + self._pgconn_ptr, + command, + nparams, + atypes, + aparams, + alenghts, + aformats, + result_format, + ) + + def prepare( + self, + name: bytes, + command: bytes, + param_types: Optional[Sequence[int]] = None, + ) -> "PGresult": + if not isinstance(name, bytes): + raise TypeError(f"'name' must be bytes, got {type(name)} instead") + + if not isinstance(command, bytes): + raise TypeError(f"'command' must be bytes, got {type(command)} instead") + + if not param_types: + nparams = 0 + atypes = None + else: + nparams = len(param_types) + atypes = (impl.Oid * nparams)(*param_types) + + self._ensure_pgconn() + rv = impl.PQprepare(self._pgconn_ptr, name, command, nparams, atypes) + if not rv: + raise e.OperationalError(f"preparing query failed: {error_message(self)}") + return PGresult(rv) + + def exec_prepared( + self, + name: bytes, + param_values: Optional[Sequence["abc.Buffer"]], + param_formats: Optional[Sequence[int]] = None, + result_format: int = 0, + ) -> "PGresult": + if not isinstance(name, bytes): + raise TypeError(f"'name' must be bytes, got {type(name)} instead") + + aparams: Optional[Array[c_char_p]] + alenghts: Optional[Array[c_int]] + if param_values: + nparams = len(param_values) + aparams = (c_char_p * nparams)( + *( + # convert bytearray/memoryview to bytes + b if b is None or isinstance(b, bytes) else bytes(b) + for b in param_values + ) + ) + alenghts = (c_int * nparams)(*(len(p) if p else 0 for p in param_values)) + else: + nparams = 0 + aparams = alenghts = None + + if not param_formats: + aformats = None + else: + if len(param_formats) != nparams: + raise ValueError( + "got %d param_values but %d param_types" + % (nparams, len(param_formats)) + ) + aformats = (c_int * 
nparams)(*param_formats) + + self._ensure_pgconn() + rv = impl.PQexecPrepared( + self._pgconn_ptr, + name, + nparams, + aparams, + alenghts, + aformats, + result_format, + ) + if not rv: + raise e.OperationalError( + f"executing prepared query failed: {error_message(self)}" + ) + return PGresult(rv) + + def describe_prepared(self, name: bytes) -> "PGresult": + if not isinstance(name, bytes): + raise TypeError(f"'name' must be bytes, got {type(name)} instead") + self._ensure_pgconn() + rv = impl.PQdescribePrepared(self._pgconn_ptr, name) + if not rv: + raise e.OperationalError(f"describe prepared failed: {error_message(self)}") + return PGresult(rv) + + def send_describe_prepared(self, name: bytes) -> None: + if not isinstance(name, bytes): + raise TypeError(f"bytes expected, got {type(name)} instead") + self._ensure_pgconn() + if not impl.PQsendDescribePrepared(self._pgconn_ptr, name): + raise e.OperationalError( + f"sending describe prepared failed: {error_message(self)}" + ) + + def describe_portal(self, name: bytes) -> "PGresult": + if not isinstance(name, bytes): + raise TypeError(f"'name' must be bytes, got {type(name)} instead") + self._ensure_pgconn() + rv = impl.PQdescribePortal(self._pgconn_ptr, name) + if not rv: + raise e.OperationalError(f"describe portal failed: {error_message(self)}") + return PGresult(rv) + + def send_describe_portal(self, name: bytes) -> None: + if not isinstance(name, bytes): + raise TypeError(f"bytes expected, got {type(name)} instead") + self._ensure_pgconn() + if not impl.PQsendDescribePortal(self._pgconn_ptr, name): + raise e.OperationalError( + f"sending describe portal failed: {error_message(self)}" + ) + + def get_result(self) -> Optional["PGresult"]: + rv = impl.PQgetResult(self._pgconn_ptr) + return PGresult(rv) if rv else None + + def consume_input(self) -> None: + if 1 != impl.PQconsumeInput(self._pgconn_ptr): + raise e.OperationalError(f"consuming input failed: {error_message(self)}") + + def is_busy(self) -> int: + 
return impl.PQisBusy(self._pgconn_ptr) + + @property + def nonblocking(self) -> int: + return impl.PQisnonblocking(self._pgconn_ptr) + + @nonblocking.setter + def nonblocking(self, arg: int) -> None: + if 0 > impl.PQsetnonblocking(self._pgconn_ptr, arg): + raise e.OperationalError( + f"setting nonblocking failed: {error_message(self)}" + ) + + def flush(self) -> int: + # PQflush segfaults if it receives a NULL connection + if not self._pgconn_ptr: + raise e.OperationalError("flushing failed: the connection is closed") + rv: int = impl.PQflush(self._pgconn_ptr) + if rv < 0: + raise e.OperationalError(f"flushing failed: {error_message(self)}") + return rv + + def set_single_row_mode(self) -> None: + if not impl.PQsetSingleRowMode(self._pgconn_ptr): + raise e.OperationalError("setting single row mode failed") + + def get_cancel(self) -> "PGcancel": + """ + Create an object with the information needed to cancel a command. + + See :pq:`PQgetCancel` for details. + """ + rv = impl.PQgetCancel(self._pgconn_ptr) + if not rv: + raise e.OperationalError("couldn't create cancel object") + return PGcancel(rv) + + def notifies(self) -> Optional[PGnotify]: + ptr = impl.PQnotifies(self._pgconn_ptr) + if ptr: + c = ptr.contents + return PGnotify(c.relname, c.be_pid, c.extra) + impl.PQfreemem(ptr) + else: + return None + + def put_copy_data(self, buffer: "abc.Buffer") -> int: + if not isinstance(buffer, bytes): + buffer = bytes(buffer) + rv = impl.PQputCopyData(self._pgconn_ptr, buffer, len(buffer)) + if rv < 0: + raise e.OperationalError(f"sending copy data failed: {error_message(self)}") + return rv + + def put_copy_end(self, error: Optional[bytes] = None) -> int: + rv = impl.PQputCopyEnd(self._pgconn_ptr, error) + if rv < 0: + raise e.OperationalError(f"sending copy end failed: {error_message(self)}") + return rv + + def get_copy_data(self, async_: int) -> Tuple[int, memoryview]: + buffer_ptr = c_char_p() + nbytes = impl.PQgetCopyData(self._pgconn_ptr, byref(buffer_ptr), async_) 
+ if nbytes == -2: + raise e.OperationalError( + f"receiving copy data failed: {error_message(self)}" + ) + if buffer_ptr: + # TODO: do it without copy + data = string_at(buffer_ptr, nbytes) + impl.PQfreemem(buffer_ptr) + return nbytes, memoryview(data) + else: + return nbytes, memoryview(b"") + + def trace(self, fileno: int) -> None: + """ + Enable tracing of the client/server communication to a file stream. + + See :pq:`PQtrace` for details. + """ + if sys.platform != "linux": + raise e.NotSupportedError("currently only supported on Linux") + stream = impl.fdopen(fileno, b"w") + impl.PQtrace(self._pgconn_ptr, stream) + + def set_trace_flags(self, flags: Trace) -> None: + """ + Configure tracing behavior of client/server communication. + + :param flags: operating mode of tracing. + + See :pq:`PQsetTraceFlags` for details. + """ + impl.PQsetTraceFlags(self._pgconn_ptr, flags) + + def untrace(self) -> None: + """ + Disable tracing, previously enabled through `trace()`. + + See :pq:`PQuntrace` for details. + """ + impl.PQuntrace(self._pgconn_ptr) + + def encrypt_password( + self, passwd: bytes, user: bytes, algorithm: Optional[bytes] = None + ) -> bytes: + """ + Return the encrypted form of a PostgreSQL password. + + See :pq:`PQencryptPasswordConn` for details. + """ + out = impl.PQencryptPasswordConn(self._pgconn_ptr, passwd, user, algorithm) + if not out: + raise e.OperationalError( + f"password encryption failed: {error_message(self)}" + ) + + rv = string_at(out) + impl.PQfreemem(out) + return rv + + def make_empty_result(self, exec_status: int) -> "PGresult": + rv = impl.PQmakeEmptyPGresult(self._pgconn_ptr, exec_status) + if not rv: + raise MemoryError("couldn't allocate empty PGresult") + return PGresult(rv) + + @property + def pipeline_status(self) -> int: + if version() < 140000: + return 0 + return impl.PQpipelineStatus(self._pgconn_ptr) + + def enter_pipeline_mode(self) -> None: + """Enter pipeline mode. 
+ + :raises ~e.OperationalError: in case of failure to enter the pipeline + mode. + """ + if impl.PQenterPipelineMode(self._pgconn_ptr) != 1: + raise e.OperationalError("failed to enter pipeline mode") + + def exit_pipeline_mode(self) -> None: + """Exit pipeline mode. + + :raises ~e.OperationalError: in case of failure to exit the pipeline + mode. + """ + if impl.PQexitPipelineMode(self._pgconn_ptr) != 1: + raise e.OperationalError(error_message(self)) + + def pipeline_sync(self) -> None: + """Mark a synchronization point in a pipeline. + + :raises ~e.OperationalError: if the connection is not in pipeline mode + or if sync failed. + """ + rv = impl.PQpipelineSync(self._pgconn_ptr) + if rv == 0: + raise e.OperationalError("connection not in pipeline mode") + if rv != 1: + raise e.OperationalError("failed to sync pipeline") + + def send_flush_request(self) -> None: + """Sends a request for the server to flush its output buffer. + + :raises ~e.OperationalError: if the flush request failed. + """ + if impl.PQsendFlushRequest(self._pgconn_ptr) == 0: + raise e.OperationalError(f"flush request failed: {error_message(self)}") + + def _call_bytes( + self, func: Callable[[impl.PGconn_struct], Optional[bytes]] + ) -> bytes: + """ + Call one of the pgconn libpq functions returning a bytes pointer. + """ + if not self._pgconn_ptr: + raise e.OperationalError("the connection is closed") + rv = func(self._pgconn_ptr) + assert rv is not None + return rv + + def _call_int(self, func: Callable[[impl.PGconn_struct], int]) -> int: + """ + Call one of the pgconn libpq functions returning an int. + """ + if not self._pgconn_ptr: + raise e.OperationalError("the connection is closed") + return func(self._pgconn_ptr) + + def _call_bool(self, func: Callable[[impl.PGconn_struct], int]) -> bool: + """ + Call one of the pgconn libpq functions returning a logical value. 
+ """ + if not self._pgconn_ptr: + raise e.OperationalError("the connection is closed") + return bool(func(self._pgconn_ptr)) + + def _ensure_pgconn(self) -> None: + if not self._pgconn_ptr: + raise e.OperationalError("the connection is closed") + + +class PGresult: + """ + Python representation of a libpq result. + """ + + __slots__ = ("_pgresult_ptr",) + + def __init__(self, pgresult_ptr: impl.PGresult_struct): + self._pgresult_ptr: Optional[impl.PGresult_struct] = pgresult_ptr + + def __del__(self) -> None: + self.clear() + + def __repr__(self) -> str: + cls = f"{self.__class__.__module__}.{self.__class__.__qualname__}" + status = ExecStatus(self.status) + return f"<{cls} [{status.name}] at 0x{id(self):x}>" + + def clear(self) -> None: + self._pgresult_ptr, p = None, self._pgresult_ptr + if p: + PQclear(p) + + @property + def pgresult_ptr(self) -> Optional[int]: + """The pointer to the underlying `!PGresult` structure, as integer. + + `!None` if the result was cleared. + + The value can be used to pass the structure to libpq functions which + psycopg doesn't (currently) wrap, either in C or in Python using FFI + libraries such as `ctypes`. 
+ """ + if self._pgresult_ptr is None: + return None + + return addressof(self._pgresult_ptr.contents) # type: ignore[attr-defined] + + @property + def status(self) -> int: + return impl.PQresultStatus(self._pgresult_ptr) + + @property + def error_message(self) -> bytes: + return impl.PQresultErrorMessage(self._pgresult_ptr) + + def error_field(self, fieldcode: int) -> Optional[bytes]: + return impl.PQresultErrorField(self._pgresult_ptr, fieldcode) + + @property + def ntuples(self) -> int: + return impl.PQntuples(self._pgresult_ptr) + + @property + def nfields(self) -> int: + return impl.PQnfields(self._pgresult_ptr) + + def fname(self, column_number: int) -> Optional[bytes]: + return impl.PQfname(self._pgresult_ptr, column_number) + + def ftable(self, column_number: int) -> int: + return impl.PQftable(self._pgresult_ptr, column_number) + + def ftablecol(self, column_number: int) -> int: + return impl.PQftablecol(self._pgresult_ptr, column_number) + + def fformat(self, column_number: int) -> int: + return impl.PQfformat(self._pgresult_ptr, column_number) + + def ftype(self, column_number: int) -> int: + return impl.PQftype(self._pgresult_ptr, column_number) + + def fmod(self, column_number: int) -> int: + return impl.PQfmod(self._pgresult_ptr, column_number) + + def fsize(self, column_number: int) -> int: + return impl.PQfsize(self._pgresult_ptr, column_number) + + @property + def binary_tuples(self) -> int: + return impl.PQbinaryTuples(self._pgresult_ptr) + + def get_value(self, row_number: int, column_number: int) -> Optional[bytes]: + length: int = impl.PQgetlength(self._pgresult_ptr, row_number, column_number) + if length: + v = impl.PQgetvalue(self._pgresult_ptr, row_number, column_number) + return string_at(v, length) + else: + if impl.PQgetisnull(self._pgresult_ptr, row_number, column_number): + return None + else: + return b"" + + @property + def nparams(self) -> int: + return impl.PQnparams(self._pgresult_ptr) + + def param_type(self, param_number: int) -> 
int: + return impl.PQparamtype(self._pgresult_ptr, param_number) + + @property + def command_status(self) -> Optional[bytes]: + return impl.PQcmdStatus(self._pgresult_ptr) + + @property + def command_tuples(self) -> Optional[int]: + rv = impl.PQcmdTuples(self._pgresult_ptr) + return int(rv) if rv else None + + @property + def oid_value(self) -> int: + return impl.PQoidValue(self._pgresult_ptr) + + def set_attributes(self, descriptions: List[PGresAttDesc]) -> None: + structs = [ + impl.PGresAttDesc_struct(*desc) for desc in descriptions # type: ignore + ] + array = (impl.PGresAttDesc_struct * len(structs))(*structs) # type: ignore + rv = impl.PQsetResultAttrs(self._pgresult_ptr, len(structs), array) + if rv == 0: + raise e.OperationalError("PQsetResultAttrs failed") + + +class PGcancel: + """ + Token to cancel the current operation on a connection. + + Created by `PGconn.get_cancel()`. + """ + + __slots__ = ("pgcancel_ptr",) + + def __init__(self, pgcancel_ptr: impl.PGcancel_struct): + self.pgcancel_ptr: Optional[impl.PGcancel_struct] = pgcancel_ptr + + def __del__(self) -> None: + self.free() + + def free(self) -> None: + """ + Free the data structure created by :pq:`PQgetCancel()`. + + Automatically invoked by `!__del__()`. + + See :pq:`PQfreeCancel()` for details. + """ + self.pgcancel_ptr, p = None, self.pgcancel_ptr + if p: + PQfreeCancel(p) + + def cancel(self) -> None: + """Requests that the server abandon processing of the current command. + + See :pq:`PQcancel()` for details. + """ + buf = create_string_buffer(256) + res = impl.PQcancel( + self.pgcancel_ptr, + byref(buf), # type: ignore[arg-type] + len(buf), + ) + if not res: + raise e.OperationalError( + f"cancel failed: {buf.value.decode('utf8', 'ignore')}" + ) + + +class Conninfo: + """ + Utility object to manipulate connection strings. 
+ """ + + @classmethod + def get_defaults(cls) -> List[ConninfoOption]: + opts = impl.PQconndefaults() + if not opts: + raise MemoryError("couldn't allocate connection defaults") + try: + return cls._options_from_array(opts) + finally: + impl.PQconninfoFree(opts) + + @classmethod + def parse(cls, conninfo: bytes) -> List[ConninfoOption]: + if not isinstance(conninfo, bytes): + raise TypeError(f"bytes expected, got {type(conninfo)} instead") + + errmsg = c_char_p() + rv = impl.PQconninfoParse(conninfo, byref(errmsg)) # type: ignore[arg-type] + if not rv: + if not errmsg: + raise MemoryError("couldn't allocate on conninfo parse") + else: + exc = e.OperationalError( + (errmsg.value or b"").decode("utf8", "replace") + ) + impl.PQfreemem(errmsg) + raise exc + + try: + return cls._options_from_array(rv) + finally: + impl.PQconninfoFree(rv) + + @classmethod + def _options_from_array( + cls, opts: Sequence[impl.PQconninfoOption_struct] + ) -> List[ConninfoOption]: + rv = [] + skws = "keyword envvar compiled val label dispchar".split() + for opt in opts: + if not opt.keyword: + break + d = {kw: getattr(opt, kw) for kw in skws} + d["dispsize"] = opt.dispsize + rv.append(ConninfoOption(**d)) + + return rv + + +class Escaping: + """ + Utility object to escape strings for SQL interpolation. 
+ """ + + def __init__(self, conn: Optional[PGconn] = None): + self.conn = conn + + def escape_literal(self, data: "abc.Buffer") -> bytes: + if not self.conn: + raise e.OperationalError("escape_literal failed: no connection provided") + + self.conn._ensure_pgconn() + # TODO: might be done without copy (however C does that) + if not isinstance(data, bytes): + data = bytes(data) + out = impl.PQescapeLiteral(self.conn._pgconn_ptr, data, len(data)) + if not out: + raise e.OperationalError( + f"escape_literal failed: {error_message(self.conn)} bytes" + ) + rv = string_at(out) + impl.PQfreemem(out) + return rv + + def escape_identifier(self, data: "abc.Buffer") -> bytes: + if not self.conn: + raise e.OperationalError("escape_identifier failed: no connection provided") + + self.conn._ensure_pgconn() + + if not isinstance(data, bytes): + data = bytes(data) + out = impl.PQescapeIdentifier(self.conn._pgconn_ptr, data, len(data)) + if not out: + raise e.OperationalError( + f"escape_identifier failed: {error_message(self.conn)} bytes" + ) + rv = string_at(out) + impl.PQfreemem(out) + return rv + + def escape_string(self, data: "abc.Buffer") -> bytes: + if not isinstance(data, bytes): + data = bytes(data) + + if self.conn: + self.conn._ensure_pgconn() + error = c_int() + out = create_string_buffer(len(data) * 2 + 1) + impl.PQescapeStringConn( + self.conn._pgconn_ptr, + byref(out), # type: ignore[arg-type] + data, + len(data), + byref(error), # type: ignore[arg-type] + ) + + if error: + raise e.OperationalError( + f"escape_string failed: {error_message(self.conn)} bytes" + ) + + else: + out = create_string_buffer(len(data) * 2 + 1) + impl.PQescapeString( + byref(out), # type: ignore[arg-type] + data, + len(data), + ) + + return out.value + + def escape_bytea(self, data: "abc.Buffer") -> bytes: + len_out = c_size_t() + # TODO: might be able to do without a copy but it's a mess. 
+ # the C library does it better anyway, so maybe not worth optimising + # https://mail.python.org/pipermail/python-dev/2012-September/121780.html + if not isinstance(data, bytes): + data = bytes(data) + if self.conn: + self.conn._ensure_pgconn() + out = impl.PQescapeByteaConn( + self.conn._pgconn_ptr, + data, + len(data), + byref(t_cast(c_ulong, len_out)), # type: ignore[arg-type] + ) + else: + out = impl.PQescapeBytea( + data, + len(data), + byref(t_cast(c_ulong, len_out)), # type: ignore[arg-type] + ) + if not out: + raise MemoryError( + f"couldn't allocate for escape_bytea of {len(data)} bytes" + ) + + rv = string_at(out, len_out.value - 1) # out includes final 0 + impl.PQfreemem(out) + return rv + + def unescape_bytea(self, data: "abc.Buffer") -> bytes: + # not needed, but let's keep it symmetric with the escaping: + # if a connection is passed in, it must be valid. + if self.conn: + self.conn._ensure_pgconn() + + len_out = c_size_t() + if not isinstance(data, bytes): + data = bytes(data) + out = impl.PQunescapeBytea( + data, + byref(t_cast(c_ulong, len_out)), # type: ignore[arg-type] + ) + if not out: + raise MemoryError( + f"couldn't allocate for unescape_bytea of {len(data)} bytes" + ) + + rv = string_at(out, len_out.value) + impl.PQfreemem(out) + return rv + + +# importing the ssl module sets up Python's libcrypto callbacks +import ssl # noqa + +# disable libcrypto setup in libpq, so it won't stomp on the callbacks +# that have already been set up +impl.PQinitOpenSSL(1, 0) + +__build_version__ = version() diff --git a/lib/python3.11/site-packages/psycopg/py.typed b/lib/python3.11/site-packages/psycopg/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/lib/python3.11/site-packages/psycopg/rows.py b/lib/python3.11/site-packages/psycopg/rows.py new file mode 100644 index 0000000..cb28b57 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/rows.py @@ -0,0 +1,256 @@ +""" +psycopg row factories +""" + +# Copyright (C) 2021 The Psycopg Team + 
+import functools +from typing import Any, Callable, Dict, List, Optional, NamedTuple, NoReturn +from typing import TYPE_CHECKING, Sequence, Tuple, Type, TypeVar +from collections import namedtuple +from typing_extensions import TypeAlias + +from . import pq +from . import errors as e +from ._compat import Protocol +from ._encodings import _as_python_identifier + +if TYPE_CHECKING: + from .cursor import BaseCursor, Cursor + from .cursor_async import AsyncCursor + from psycopg.pq.abc import PGresult + +COMMAND_OK = pq.ExecStatus.COMMAND_OK +TUPLES_OK = pq.ExecStatus.TUPLES_OK +SINGLE_TUPLE = pq.ExecStatus.SINGLE_TUPLE + +T = TypeVar("T", covariant=True) + +# Row factories + +Row = TypeVar("Row", covariant=True) + + +class RowMaker(Protocol[Row]): + """ + Callable protocol taking a sequence of value and returning an object. + + The sequence of value is what is returned from a database query, already + adapted to the right Python types. The return value is the object that your + program would like to receive: by default (`tuple_row()`) it is a simple + tuple, but it may be any type of object. + + Typically, `!RowMaker` functions are returned by `RowFactory`. + """ + + def __call__(self, __values: Sequence[Any]) -> Row: + ... + + +class RowFactory(Protocol[Row]): + """ + Callable protocol taking a `~psycopg.Cursor` and returning a `RowMaker`. + + A `!RowFactory` is typically called when a `!Cursor` receives a result. + This way it can inspect the cursor state (for instance the + `~psycopg.Cursor.description` attribute) and help a `!RowMaker` to create + a complete object. + + For instance the `dict_row()` `!RowFactory` uses the names of the column to + define the dictionary key and returns a `!RowMaker` function which would + use the values to create a dictionary for each record. + """ + + def __call__(self, __cursor: "Cursor[Any]") -> RowMaker[Row]: + ... + + +class AsyncRowFactory(Protocol[Row]): + """ + Like `RowFactory`, taking an async cursor as argument. 
+ """ + + def __call__(self, __cursor: "AsyncCursor[Any]") -> RowMaker[Row]: + ... + + +class BaseRowFactory(Protocol[Row]): + """ + Like `RowFactory`, taking either type of cursor as argument. + """ + + def __call__(self, __cursor: "BaseCursor[Any, Any]") -> RowMaker[Row]: + ... + + +TupleRow: TypeAlias = Tuple[Any, ...] +""" +An alias for the type returned by `tuple_row()` (i.e. a tuple of any content). +""" + + +DictRow: TypeAlias = Dict[str, Any] +""" +An alias for the type returned by `dict_row()` + +A `!DictRow` is a dictionary with keys as string and any value returned by the +database. +""" + + +def tuple_row(cursor: "BaseCursor[Any, Any]") -> "RowMaker[TupleRow]": + r"""Row factory to represent rows as simple tuples. + + This is the default factory, used when `~psycopg.Connection.connect()` or + `~psycopg.Connection.cursor()` are called without a `!row_factory` + parameter. + + """ + # Implementation detail: make sure this is the tuple type itself, not an + # equivalent function, because the C code fast-paths on it. + return tuple + + +def dict_row(cursor: "BaseCursor[Any, Any]") -> "RowMaker[DictRow]": + """Row factory to represent rows as dictionaries. + + The dictionary keys are taken from the column names of the returned columns. + """ + names = _get_names(cursor) + if names is None: + return no_result + + def dict_row_(values: Sequence[Any]) -> Dict[str, Any]: + # https://github.com/python/mypy/issues/2608 + return dict(zip(names, values)) # type: ignore[arg-type] + + return dict_row_ + + +def namedtuple_row( + cursor: "BaseCursor[Any, Any]", +) -> "RowMaker[NamedTuple]": + """Row factory to represent rows as `~collections.namedtuple`. + + The field names are taken from the column names of the returned columns, + with some mangling to deal with invalid names. 
+ """ + res = cursor.pgresult + if not res: + return no_result + + nfields = _get_nfields(res) + if nfields is None: + return no_result + + nt = _make_nt(cursor._encoding, *(res.fname(i) for i in range(nfields))) + return nt._make + + +@functools.lru_cache(512) +def _make_nt(enc: str, *names: bytes) -> Type[NamedTuple]: + snames = tuple(_as_python_identifier(n.decode(enc)) for n in names) + return namedtuple("Row", snames) # type: ignore[return-value] + + +def class_row(cls: Type[T]) -> BaseRowFactory[T]: + r"""Generate a row factory to represent rows as instances of the class `!cls`. + + The class must support every output column name as a keyword parameter. + + :param cls: The class to return for each row. It must support the fields + returned by the query as keyword arguments. + :rtype: `!Callable[[Cursor],` `RowMaker`\[~T]] + """ + + def class_row_(cursor: "BaseCursor[Any, Any]") -> "RowMaker[T]": + names = _get_names(cursor) + if names is None: + return no_result + + def class_row__(values: Sequence[Any]) -> T: + return cls(**dict(zip(names, values))) # type: ignore[arg-type] + + return class_row__ + + return class_row_ + + +def args_row(func: Callable[..., T]) -> BaseRowFactory[T]: + """Generate a row factory calling `!func` with positional parameters for every row. + + :param func: The function to call for each row. It must support the fields + returned by the query as positional arguments. + """ + + def args_row_(cur: "BaseCursor[Any, T]") -> "RowMaker[T]": + def args_row__(values: Sequence[Any]) -> T: + return func(*values) + + return args_row__ + + return args_row_ + + +def kwargs_row(func: Callable[..., T]) -> BaseRowFactory[T]: + """Generate a row factory calling `!func` with keyword parameters for every row. + + :param func: The function to call for each row. It must support the fields + returned by the query as keyword arguments. 
+ """ + + def kwargs_row_(cursor: "BaseCursor[Any, T]") -> "RowMaker[T]": + names = _get_names(cursor) + if names is None: + return no_result + + def kwargs_row__(values: Sequence[Any]) -> T: + return func(**dict(zip(names, values))) # type: ignore[arg-type] + + return kwargs_row__ + + return kwargs_row_ + + +def no_result(values: Sequence[Any]) -> NoReturn: + """A `RowMaker` that always fail. + + It can be used as return value for a `RowFactory` called with no result. + Note that the `!RowFactory` *will* be called with no result, but the + resulting `!RowMaker` never should. + """ + raise e.InterfaceError("the cursor doesn't have a result") + + +def _get_names(cursor: "BaseCursor[Any, Any]") -> Optional[List[str]]: + res = cursor.pgresult + if not res: + return None + + nfields = _get_nfields(res) + if nfields is None: + return None + + enc = cursor._encoding + return [ + res.fname(i).decode(enc) for i in range(nfields) # type: ignore[union-attr] + ] + + +def _get_nfields(res: "PGresult") -> Optional[int]: + """ + Return the number of columns in a result, if it returns tuples else None + + Take into account the special case of results with zero columns. + """ + nfields = res.nfields + + if ( + res.status == TUPLES_OK + or res.status == SINGLE_TUPLE + # "describe" in named cursors + or (res.status == COMMAND_OK and nfields) + ): + return nfields + else: + return None diff --git a/lib/python3.11/site-packages/psycopg/server_cursor.py b/lib/python3.11/site-packages/psycopg/server_cursor.py new file mode 100644 index 0000000..7a86e59 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/server_cursor.py @@ -0,0 +1,478 @@ +""" +psycopg server-side cursor objects. +""" + +# Copyright (C) 2020 The Psycopg Team + +from typing import Any, AsyncIterator, List, Iterable, Iterator +from typing import Optional, TypeVar, TYPE_CHECKING, overload +from warnings import warn + +from . import pq +from . import sql +from . 
import errors as e +from .abc import ConnectionType, Query, Params, PQGen +from .rows import Row, RowFactory, AsyncRowFactory +from .cursor import BaseCursor, Cursor +from .generators import execute +from .cursor_async import AsyncCursor + +if TYPE_CHECKING: + from .connection import Connection + from .connection_async import AsyncConnection + +DEFAULT_ITERSIZE = 100 + +TEXT = pq.Format.TEXT +BINARY = pq.Format.BINARY + +COMMAND_OK = pq.ExecStatus.COMMAND_OK +TUPLES_OK = pq.ExecStatus.TUPLES_OK + +IDLE = pq.TransactionStatus.IDLE +INTRANS = pq.TransactionStatus.INTRANS + + +class ServerCursorMixin(BaseCursor[ConnectionType, Row]): + """Mixin to add ServerCursor behaviour and implementation a BaseCursor.""" + + __slots__ = "_name _scrollable _withhold _described itersize _format".split() + + def __init__( + self, + name: str, + scrollable: Optional[bool], + withhold: bool, + ): + self._name = name + self._scrollable = scrollable + self._withhold = withhold + self._described = False + self.itersize: int = DEFAULT_ITERSIZE + self._format = TEXT + + def __repr__(self) -> str: + # Insert the name as the second word + parts = super().__repr__().split(None, 1) + parts.insert(1, f"{self._name!r}") + return " ".join(parts) + + @property + def name(self) -> str: + """The name of the cursor.""" + return self._name + + @property + def scrollable(self) -> Optional[bool]: + """ + Whether the cursor is scrollable or not. + + If `!None` leave the choice to the server. Use `!True` if you want to + use `scroll()` on the cursor. + """ + return self._scrollable + + @property + def withhold(self) -> bool: + """ + If the cursor can be used after the creating transaction has committed. + """ + return self._withhold + + @property + def rownumber(self) -> Optional[int]: + """Index of the next row to fetch in the current result. + + `!None` if there is no result to fetch. 
+ """ + res = self.pgresult + # command_status is empty if the result comes from + # describe_portal, which means that we have just executed the DECLARE, + # so we can assume we are at the first row. + tuples = res and (res.status == TUPLES_OK or res.command_status == b"") + return self._pos if tuples else None + + def _declare_gen( + self, + query: Query, + params: Optional[Params] = None, + binary: Optional[bool] = None, + ) -> PQGen[None]: + """Generator implementing `ServerCursor.execute()`.""" + + query = self._make_declare_statement(query) + + # If the cursor is being reused, the previous one must be closed. + if self._described: + yield from self._close_gen() + self._described = False + + yield from self._start_query(query) + pgq = self._convert_query(query, params) + self._execute_send(pgq, force_extended=True) + results = yield from execute(self._conn.pgconn) + if results[-1].status != COMMAND_OK: + self._raise_for_result(results[-1]) + + # Set the format, which will be used by describe and fetch operations + if binary is None: + self._format = self.format + else: + self._format = BINARY if binary else TEXT + + # The above result only returned COMMAND_OK. Get the cursor shape + yield from self._describe_gen() + + def _describe_gen(self) -> PQGen[None]: + self._pgconn.send_describe_portal(self._name.encode(self._encoding)) + results = yield from execute(self._pgconn) + self._check_results(results) + self._results = results + self._select_current_result(0, format=self._format) + self._described = True + + def _close_gen(self) -> PQGen[None]: + ts = self._conn.pgconn.transaction_status + + # if the connection is not in a sane state, don't even try + if ts != IDLE and ts != INTRANS: + return + + # If we are IDLE, a WITHOUT HOLD cursor will surely have gone already. + if not self._withhold and ts == IDLE: + return + + # if we didn't declare the cursor ourselves we still have to close it + # but we must make sure it exists. 
+ if not self._described: + query = sql.SQL( + "SELECT 1 FROM pg_catalog.pg_cursors WHERE name = {}" + ).format(sql.Literal(self._name)) + res = yield from self._conn._exec_command(query) + # pipeline mode otherwise, unsupported here. + assert res is not None + if res.ntuples == 0: + return + + query = sql.SQL("CLOSE {}").format(sql.Identifier(self._name)) + yield from self._conn._exec_command(query) + + def _fetch_gen(self, num: Optional[int]) -> PQGen[List[Row]]: + if self.closed: + raise e.InterfaceError("the cursor is closed") + # If we are stealing the cursor, make sure we know its shape + if not self._described: + yield from self._start_query() + yield from self._describe_gen() + + query = sql.SQL("FETCH FORWARD {} FROM {}").format( + sql.SQL("ALL") if num is None else sql.Literal(num), + sql.Identifier(self._name), + ) + res = yield from self._conn._exec_command(query, result_format=self._format) + # pipeline mode otherwise, unsupported here. + assert res is not None + + self.pgresult = res + self._tx.set_pgresult(res, set_loaders=False) + return self._tx.load_rows(0, res.ntuples, self._make_row) + + def _scroll_gen(self, value: int, mode: str) -> PQGen[None]: + if mode not in ("relative", "absolute"): + raise ValueError(f"bad mode: {mode}. 
It should be 'relative' or 'absolute'") + query = sql.SQL("MOVE{} {} FROM {}").format( + sql.SQL(" ABSOLUTE" if mode == "absolute" else ""), + sql.Literal(value), + sql.Identifier(self._name), + ) + yield from self._conn._exec_command(query) + + def _make_declare_statement(self, query: Query) -> sql.Composed: + if isinstance(query, bytes): + query = query.decode(self._encoding) + if not isinstance(query, sql.Composable): + query = sql.SQL(query) + + parts = [ + sql.SQL("DECLARE"), + sql.Identifier(self._name), + ] + if self._scrollable is not None: + parts.append(sql.SQL("SCROLL" if self._scrollable else "NO SCROLL")) + parts.append(sql.SQL("CURSOR")) + if self._withhold: + parts.append(sql.SQL("WITH HOLD")) + parts.append(sql.SQL("FOR")) + parts.append(query) + + return sql.SQL(" ").join(parts) + + +class ServerCursor(ServerCursorMixin["Connection[Any]", Row], Cursor[Row]): + __module__ = "psycopg" + __slots__ = () + _Self = TypeVar("_Self", bound="ServerCursor[Any]") + + @overload + def __init__( + self: "ServerCursor[Row]", + connection: "Connection[Row]", + name: str, + *, + scrollable: Optional[bool] = None, + withhold: bool = False, + ): + ... + + @overload + def __init__( + self: "ServerCursor[Row]", + connection: "Connection[Any]", + name: str, + *, + row_factory: RowFactory[Row], + scrollable: Optional[bool] = None, + withhold: bool = False, + ): + ... + + def __init__( + self, + connection: "Connection[Any]", + name: str, + *, + row_factory: Optional[RowFactory[Row]] = None, + scrollable: Optional[bool] = None, + withhold: bool = False, + ): + Cursor.__init__( + self, connection, row_factory=row_factory or connection.row_factory + ) + ServerCursorMixin.__init__(self, name, scrollable, withhold) + + def __del__(self) -> None: + if not self.closed: + warn( + f"the server-side cursor {self} was deleted while still open." 
+ " Please use 'with' or '.close()' to close the cursor properly", + ResourceWarning, + ) + + def close(self) -> None: + """ + Close the current cursor and free associated resources. + """ + with self._conn.lock: + if self.closed: + return + if not self._conn.closed: + self._conn.wait(self._close_gen()) + super().close() + + def execute( + self: _Self, + query: Query, + params: Optional[Params] = None, + *, + binary: Optional[bool] = None, + **kwargs: Any, + ) -> _Self: + """ + Open a cursor to execute a query to the database. + """ + if kwargs: + raise TypeError(f"keyword not supported: {list(kwargs)[0]}") + if self._pgconn.pipeline_status: + raise e.NotSupportedError( + "server-side cursors not supported in pipeline mode" + ) + + try: + with self._conn.lock: + self._conn.wait(self._declare_gen(query, params, binary)) + except e._NO_TRACEBACK as ex: + raise ex.with_traceback(None) + + return self + + def executemany( + self, + query: Query, + params_seq: Iterable[Params], + *, + returning: bool = True, + ) -> None: + """Method not implemented for server-side cursors.""" + raise e.NotSupportedError("executemany not supported on server-side cursors") + + def fetchone(self) -> Optional[Row]: + with self._conn.lock: + recs = self._conn.wait(self._fetch_gen(1)) + if recs: + self._pos += 1 + return recs[0] + else: + return None + + def fetchmany(self, size: int = 0) -> List[Row]: + if not size: + size = self.arraysize + with self._conn.lock: + recs = self._conn.wait(self._fetch_gen(size)) + self._pos += len(recs) + return recs + + def fetchall(self) -> List[Row]: + with self._conn.lock: + recs = self._conn.wait(self._fetch_gen(None)) + self._pos += len(recs) + return recs + + def __iter__(self) -> Iterator[Row]: + while True: + with self._conn.lock: + recs = self._conn.wait(self._fetch_gen(self.itersize)) + for rec in recs: + self._pos += 1 + yield rec + if len(recs) < self.itersize: + break + + def scroll(self, value: int, mode: str = "relative") -> None: + with 
self._conn.lock: + self._conn.wait(self._scroll_gen(value, mode)) + # Postgres doesn't have a reliable way to report a cursor out of bound + if mode == "relative": + self._pos += value + else: + self._pos = value + + +class AsyncServerCursor( + ServerCursorMixin["AsyncConnection[Any]", Row], AsyncCursor[Row] +): + __module__ = "psycopg" + __slots__ = () + _Self = TypeVar("_Self", bound="AsyncServerCursor[Any]") + + @overload + def __init__( + self: "AsyncServerCursor[Row]", + connection: "AsyncConnection[Row]", + name: str, + *, + scrollable: Optional[bool] = None, + withhold: bool = False, + ): + ... + + @overload + def __init__( + self: "AsyncServerCursor[Row]", + connection: "AsyncConnection[Any]", + name: str, + *, + row_factory: AsyncRowFactory[Row], + scrollable: Optional[bool] = None, + withhold: bool = False, + ): + ... + + def __init__( + self, + connection: "AsyncConnection[Any]", + name: str, + *, + row_factory: Optional[AsyncRowFactory[Row]] = None, + scrollable: Optional[bool] = None, + withhold: bool = False, + ): + AsyncCursor.__init__( + self, connection, row_factory=row_factory or connection.row_factory + ) + ServerCursorMixin.__init__(self, name, scrollable, withhold) + + def __del__(self) -> None: + if not self.closed: + warn( + f"the server-side cursor {self} was deleted while still open." 
+ " Please use 'with' or '.close()' to close the cursor properly", + ResourceWarning, + ) + + async def close(self) -> None: + async with self._conn.lock: + if self.closed: + return + if not self._conn.closed: + await self._conn.wait(self._close_gen()) + await super().close() + + async def execute( + self: _Self, + query: Query, + params: Optional[Params] = None, + *, + binary: Optional[bool] = None, + **kwargs: Any, + ) -> _Self: + if kwargs: + raise TypeError(f"keyword not supported: {list(kwargs)[0]}") + if self._pgconn.pipeline_status: + raise e.NotSupportedError( + "server-side cursors not supported in pipeline mode" + ) + + try: + async with self._conn.lock: + await self._conn.wait(self._declare_gen(query, params, binary)) + except e._NO_TRACEBACK as ex: + raise ex.with_traceback(None) + + return self + + async def executemany( + self, + query: Query, + params_seq: Iterable[Params], + *, + returning: bool = True, + ) -> None: + raise e.NotSupportedError("executemany not supported on server-side cursors") + + async def fetchone(self) -> Optional[Row]: + async with self._conn.lock: + recs = await self._conn.wait(self._fetch_gen(1)) + if recs: + self._pos += 1 + return recs[0] + else: + return None + + async def fetchmany(self, size: int = 0) -> List[Row]: + if not size: + size = self.arraysize + async with self._conn.lock: + recs = await self._conn.wait(self._fetch_gen(size)) + self._pos += len(recs) + return recs + + async def fetchall(self) -> List[Row]: + async with self._conn.lock: + recs = await self._conn.wait(self._fetch_gen(None)) + self._pos += len(recs) + return recs + + async def __aiter__(self) -> AsyncIterator[Row]: + while True: + async with self._conn.lock: + recs = await self._conn.wait(self._fetch_gen(self.itersize)) + for rec in recs: + self._pos += 1 + yield rec + if len(recs) < self.itersize: + break + + async def scroll(self, value: int, mode: str = "relative") -> None: + async with self._conn.lock: + await 
self._conn.wait(self._scroll_gen(value, mode)) diff --git a/lib/python3.11/site-packages/psycopg/sql.py b/lib/python3.11/site-packages/psycopg/sql.py new file mode 100644 index 0000000..099a01c --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/sql.py @@ -0,0 +1,467 @@ +""" +SQL composition utility module +""" + +# Copyright (C) 2020 The Psycopg Team + +import codecs +import string +from abc import ABC, abstractmethod +from typing import Any, Iterator, Iterable, List, Optional, Sequence, Union + +from .pq import Escaping +from .abc import AdaptContext +from .adapt import Transformer, PyFormat +from ._compat import LiteralString +from ._encodings import conn_encoding + + +def quote(obj: Any, context: Optional[AdaptContext] = None) -> str: + """ + Adapt a Python object to a quoted SQL string. + + Use this function only if you absolutely want to convert a Python string to + an SQL quoted literal to use e.g. to generate batch SQL and you won't have + a connection available when you will need to use it. + + This function is relatively inefficient, because it doesn't cache the + adaptation rules. If you pass a `!context` you can adapt the adaptation + rules used, otherwise only global rules are used. + + """ + return Literal(obj).as_string(context) + + +class Composable(ABC): + """ + Abstract base class for objects that can be used to compose an SQL string. + + `!Composable` objects can be passed directly to + `~psycopg.Cursor.execute()`, `~psycopg.Cursor.executemany()`, + `~psycopg.Cursor.copy()` in place of the query string. + + `!Composable` objects can be joined using the ``+`` operator: the result + will be a `Composed` instance containing the objects joined. The operator + ``*`` is also supported with an integer argument: the result is a + `!Composed` instance containing the left argument repeated as many times as + requested. 
+ """ + + def __init__(self, obj: Any): + self._obj = obj + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self._obj!r})" + + @abstractmethod + def as_bytes(self, context: Optional[AdaptContext]) -> bytes: + """ + Return the value of the object as bytes. + + :param context: the context to evaluate the object into. + :type context: `connection` or `cursor` + + The method is automatically invoked by `~psycopg.Cursor.execute()`, + `~psycopg.Cursor.executemany()`, `~psycopg.Cursor.copy()` if a + `!Composable` is passed instead of the query string. + + """ + raise NotImplementedError + + def as_string(self, context: Optional[AdaptContext]) -> str: + """ + Return the value of the object as string. + + :param context: the context to evaluate the string into. + :type context: `connection` or `cursor` + + """ + conn = context.connection if context else None + enc = conn_encoding(conn) + b = self.as_bytes(context) + if isinstance(b, bytes): + return b.decode(enc) + else: + # buffer object + return codecs.lookup(enc).decode(b)[0] + + def __add__(self, other: "Composable") -> "Composed": + if isinstance(other, Composed): + return Composed([self]) + other + if isinstance(other, Composable): + return Composed([self]) + Composed([other]) + else: + return NotImplemented + + def __mul__(self, n: int) -> "Composed": + return Composed([self] * n) + + def __eq__(self, other: Any) -> bool: + return type(self) is type(other) and self._obj == other._obj + + def __ne__(self, other: Any) -> bool: + return not self.__eq__(other) + + +class Composed(Composable): + """ + A `Composable` object made of a sequence of `!Composable`. + + The object is usually created using `!Composable` operators and methods. + However it is possible to create a `!Composed` directly specifying a + sequence of objects as arguments: if they are not `!Composable` they will + be wrapped in a `Literal`. + + Example:: + + >>> comp = sql.Composed( + ... 
[sql.SQL("INSERT INTO "), sql.Identifier("table")]) + >>> print(comp.as_string(conn)) + INSERT INTO "table" + + `!Composed` objects are iterable (so they can be used in `SQL.join` for + instance). + """ + + _obj: List[Composable] + + def __init__(self, seq: Sequence[Any]): + seq = [obj if isinstance(obj, Composable) else Literal(obj) for obj in seq] + super().__init__(seq) + + def as_bytes(self, context: Optional[AdaptContext]) -> bytes: + return b"".join(obj.as_bytes(context) for obj in self._obj) + + def __iter__(self) -> Iterator[Composable]: + return iter(self._obj) + + def __add__(self, other: Composable) -> "Composed": + if isinstance(other, Composed): + return Composed(self._obj + other._obj) + if isinstance(other, Composable): + return Composed(self._obj + [other]) + else: + return NotImplemented + + def join(self, joiner: Union["SQL", LiteralString]) -> "Composed": + """ + Return a new `!Composed` interposing the `!joiner` with the `!Composed` items. + + The `!joiner` must be a `SQL` or a string which will be interpreted as + an `SQL`. + + Example:: + + >>> fields = sql.Identifier('foo') + sql.Identifier('bar') # a Composed + >>> print(fields.join(', ').as_string(conn)) + "foo", "bar" + + """ + if isinstance(joiner, str): + joiner = SQL(joiner) + elif not isinstance(joiner, SQL): + raise TypeError( + "Composed.join() argument must be strings or SQL," + f" got {joiner!r} instead" + ) + + return joiner.join(self._obj) + + +class SQL(Composable): + """ + A `Composable` representing a snippet of SQL statement. + + `!SQL` exposes `join()` and `format()` methods useful to create a template + where to merge variable parts of a query (for instance field or table + names). 
+ + The `!obj` string doesn't undergo any form of escaping, so it is not + suitable to represent variable identifiers or values: you should only use + it to pass constant strings representing templates or snippets of SQL + statements; use other objects such as `Identifier` or `Literal` to + represent variable parts. + + Example:: + + >>> query = sql.SQL("SELECT {0} FROM {1}").format( + ... sql.SQL(', ').join([sql.Identifier('foo'), sql.Identifier('bar')]), + ... sql.Identifier('table')) + >>> print(query.as_string(conn)) + SELECT "foo", "bar" FROM "table" + """ + + _obj: LiteralString + _formatter = string.Formatter() + + def __init__(self, obj: LiteralString): + super().__init__(obj) + if not isinstance(obj, str): + raise TypeError(f"SQL values must be strings, got {obj!r} instead") + + def as_string(self, context: Optional[AdaptContext]) -> str: + return self._obj + + def as_bytes(self, context: Optional[AdaptContext]) -> bytes: + enc = "utf-8" + if context: + enc = conn_encoding(context.connection) + return self._obj.encode(enc) + + def format(self, *args: Any, **kwargs: Any) -> Composed: + """ + Merge `Composable` objects into a template. + + :param args: parameters to replace to numbered (``{0}``, ``{1}``) or + auto-numbered (``{}``) placeholders + :param kwargs: parameters to replace to named (``{name}``) placeholders + :return: the union of the `!SQL` string with placeholders replaced + :rtype: `Composed` + + The method is similar to the Python `str.format()` method: the string + template supports auto-numbered (``{}``), numbered (``{0}``, + ``{1}``...), and named placeholders (``{name}``), with positional + arguments replacing the numbered placeholders and keywords replacing + the named ones. However placeholder modifiers (``{0!r}``, ``{0:<10}``) + are not supported. + + If a `!Composable` objects is passed to the template it will be merged + according to its `as_string()` method. 
If any other Python object is + passed, it will be wrapped in a `Literal` object and so escaped + according to SQL rules. + + Example:: + + >>> print(sql.SQL("SELECT * FROM {} WHERE {} = %s") + ... .format(sql.Identifier('people'), sql.Identifier('id')) + ... .as_string(conn)) + SELECT * FROM "people" WHERE "id" = %s + + >>> print(sql.SQL("SELECT * FROM {tbl} WHERE name = {name}") + ... .format(tbl=sql.Identifier('people'), name="O'Rourke")) + ... .as_string(conn)) + SELECT * FROM "people" WHERE name = 'O''Rourke' + + """ + rv: List[Composable] = [] + autonum: Optional[int] = 0 + # TODO: this is probably not the right way to whitelist pre + # pyre complains. Will wait for mypy to complain too to fix. + pre: LiteralString + for pre, name, spec, conv in self._formatter.parse(self._obj): + if spec: + raise ValueError("no format specification supported by SQL") + if conv: + raise ValueError("no format conversion supported by SQL") + if pre: + rv.append(SQL(pre)) + + if name is None: + continue + + if name.isdigit(): + if autonum: + raise ValueError( + "cannot switch from automatic field numbering to manual" + ) + rv.append(args[int(name)]) + autonum = None + + elif not name: + if autonum is None: + raise ValueError( + "cannot switch from manual field numbering to automatic" + ) + rv.append(args[autonum]) + autonum += 1 + + else: + rv.append(kwargs[name]) + + return Composed(rv) + + def join(self, seq: Iterable[Composable]) -> Composed: + """ + Join a sequence of `Composable`. + + :param seq: the elements to join. + :type seq: iterable of `!Composable` + + Use the `!SQL` object's string to separate the elements in `!seq`. + Note that `Composed` objects are iterable too, so they can be used as + argument for this method. + + Example:: + + >>> snip = sql.SQL(', ').join( + ... 
sql.Identifier(n) for n in ['foo', 'bar', 'baz']) + >>> print(snip.as_string(conn)) + "foo", "bar", "baz" + """ + rv = [] + it = iter(seq) + try: + rv.append(next(it)) + except StopIteration: + pass + else: + for i in it: + rv.append(self) + rv.append(i) + + return Composed(rv) + + +class Identifier(Composable): + """ + A `Composable` representing an SQL identifier or a dot-separated sequence. + + Identifiers usually represent names of database objects, such as tables or + fields. PostgreSQL identifiers follow `different rules`__ than SQL string + literals for escaping (e.g. they use double quotes instead of single). + + .. __: https://www.postgresql.org/docs/current/sql-syntax-lexical.html# \ + SQL-SYNTAX-IDENTIFIERS + + Example:: + + >>> t1 = sql.Identifier("foo") + >>> t2 = sql.Identifier("ba'r") + >>> t3 = sql.Identifier('ba"z') + >>> print(sql.SQL(', ').join([t1, t2, t3]).as_string(conn)) + "foo", "ba'r", "ba""z" + + Multiple strings can be passed to the object to represent a qualified name, + i.e. a dot-separated sequence of identifiers. + + Example:: + + >>> query = sql.SQL("SELECT {} FROM {}").format( + ... sql.Identifier("table", "field"), + ... 
sql.Identifier("schema", "table")) + >>> print(query.as_string(conn)) + SELECT "table"."field" FROM "schema"."table" + + """ + + _obj: Sequence[str] + + def __init__(self, *strings: str): + # init super() now to make the __repr__ not explode in case of error + super().__init__(strings) + + if not strings: + raise TypeError("Identifier cannot be empty") + + for s in strings: + if not isinstance(s, str): + raise TypeError( + f"SQL identifier parts must be strings, got {s!r} instead" + ) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({', '.join(map(repr, self._obj))})" + + def as_bytes(self, context: Optional[AdaptContext]) -> bytes: + conn = context.connection if context else None + if not conn: + raise ValueError("a connection is necessary for Identifier") + esc = Escaping(conn.pgconn) + enc = conn_encoding(conn) + escs = [esc.escape_identifier(s.encode(enc)) for s in self._obj] + return b".".join(escs) + + +class Literal(Composable): + """ + A `Composable` representing an SQL value to include in a query. + + Usually you will want to include placeholders in the query and pass values + as `~cursor.execute()` arguments. If however you really really need to + include a literal value in the query you can use this object. + + The string returned by `!as_string()` follows the normal :ref:`adaptation + rules ` for Python objects. + + Example:: + + >>> s1 = sql.Literal("fo'o") + >>> s2 = sql.Literal(42) + >>> s3 = sql.Literal(date(2000, 1, 1)) + >>> print(sql.SQL(', ').join([s1, s2, s3]).as_string(conn)) + 'fo''o', 42, '2000-01-01'::date + + """ + + def as_bytes(self, context: Optional[AdaptContext]) -> bytes: + tx = Transformer.from_context(context) + return tx.as_literal(self._obj) + + +class Placeholder(Composable): + """A `Composable` representing a placeholder for query parameters. + + If the name is specified, generate a named placeholder (e.g. ``%(name)s``, + ``%(name)b``), otherwise generate a positional placeholder (e.g. ``%s``, + ``%b``). 
+ + The object is useful to generate SQL queries with a variable number of + arguments. + + Examples:: + + >>> names = ['foo', 'bar', 'baz'] + + >>> q1 = sql.SQL("INSERT INTO my_table ({}) VALUES ({})").format( + ... sql.SQL(', ').join(map(sql.Identifier, names)), + ... sql.SQL(', ').join(sql.Placeholder() * len(names))) + >>> print(q1.as_string(conn)) + INSERT INTO my_table ("foo", "bar", "baz") VALUES (%s, %s, %s) + + >>> q2 = sql.SQL("INSERT INTO my_table ({}) VALUES ({})").format( + ... sql.SQL(', ').join(map(sql.Identifier, names)), + ... sql.SQL(', ').join(map(sql.Placeholder, names))) + >>> print(q2.as_string(conn)) + INSERT INTO my_table ("foo", "bar", "baz") VALUES (%(foo)s, %(bar)s, %(baz)s) + + """ + + def __init__(self, name: str = "", format: Union[str, PyFormat] = PyFormat.AUTO): + super().__init__(name) + if not isinstance(name, str): + raise TypeError(f"expected string as name, got {name!r}") + + if ")" in name: + raise ValueError(f"invalid name: {name!r}") + + if type(format) is str: + format = PyFormat(format) + if not isinstance(format, PyFormat): + raise TypeError( + f"expected PyFormat as format, got {type(format).__name__!r}" + ) + + self._format: PyFormat = format + + def __repr__(self) -> str: + parts = [] + if self._obj: + parts.append(repr(self._obj)) + if self._format is not PyFormat.AUTO: + parts.append(f"format={self._format.name}") + + return f"{self.__class__.__name__}({', '.join(parts)})" + + def as_string(self, context: Optional[AdaptContext]) -> str: + code = self._format.value + return f"%({self._obj}){code}" if self._obj else f"%{code}" + + def as_bytes(self, context: Optional[AdaptContext]) -> bytes: + conn = context.connection if context else None + enc = conn_encoding(conn) + return self.as_string(context).encode(enc) + + +# Literals +NULL = SQL("NULL") +DEFAULT = SQL("DEFAULT") diff --git a/lib/python3.11/site-packages/psycopg/transaction.py b/lib/python3.11/site-packages/psycopg/transaction.py new file mode 100644 index 
0000000..e13486e --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/transaction.py @@ -0,0 +1,290 @@ +""" +Transaction context managers returned by Connection.transaction() +""" + +# Copyright (C) 2020 The Psycopg Team + +import logging + +from types import TracebackType +from typing import Generic, Iterator, Optional, Type, Union, TypeVar, TYPE_CHECKING + +from . import pq +from . import sql +from . import errors as e +from .abc import ConnectionType, PQGen + +if TYPE_CHECKING: + from typing import Any + from .connection import Connection + from .connection_async import AsyncConnection + +IDLE = pq.TransactionStatus.IDLE + +OK = pq.ConnStatus.OK + +logger = logging.getLogger(__name__) + + +class Rollback(Exception): + """ + Exit the current `Transaction` context immediately and rollback any changes + made within this context. + + If a transaction context is specified in the constructor, rollback + enclosing transactions contexts up to and including the one specified. + """ + + __module__ = "psycopg" + + def __init__( + self, + transaction: Union["Transaction", "AsyncTransaction", None] = None, + ): + self.transaction = transaction + + def __repr__(self) -> str: + return f"{self.__class__.__qualname__}({self.transaction!r})" + + +class OutOfOrderTransactionNesting(e.ProgrammingError): + """Out-of-order transaction nesting detected""" + + +class BaseTransaction(Generic[ConnectionType]): + def __init__( + self, + connection: ConnectionType, + savepoint_name: Optional[str] = None, + force_rollback: bool = False, + ): + self._conn = connection + self.pgconn = self._conn.pgconn + self._savepoint_name = savepoint_name or "" + self.force_rollback = force_rollback + self._entered = self._exited = False + self._outer_transaction = False + self._stack_index = -1 + + @property + def savepoint_name(self) -> Optional[str]: + """ + The name of the savepoint; `!None` if handling the main transaction. + """ + # Yes, it may change on __enter__. 
No, I don't care, because the + # un-entered state is outside the public interface. + return self._savepoint_name + + def __repr__(self) -> str: + cls = f"{self.__class__.__module__}.{self.__class__.__qualname__}" + info = pq.misc.connection_summary(self.pgconn) + if not self._entered: + status = "inactive" + elif not self._exited: + status = "active" + else: + status = "terminated" + + sp = f"{self.savepoint_name!r} " if self.savepoint_name else "" + return f"<{cls} {sp}({status}) {info} at 0x{id(self):x}>" + + def _enter_gen(self) -> PQGen[None]: + if self._entered: + raise TypeError("transaction blocks can be used only once") + self._entered = True + + self._push_savepoint() + for command in self._get_enter_commands(): + yield from self._conn._exec_command(command) + + def _exit_gen( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> PQGen[bool]: + if not exc_val and not self.force_rollback: + yield from self._commit_gen() + return False + else: + # try to rollback, but if there are problems (connection in a bad + # state) just warn without clobbering the exception bubbling up. + try: + return (yield from self._rollback_gen(exc_val)) + except OutOfOrderTransactionNesting: + # Clobber an exception happened in the block with the exception + # caused by out-of-order transaction detected, so make the + # behaviour consistent with _commit_gen and to make sure the + # user fixes this condition, which is unrelated from + # operational error that might arise in the block. 
+ raise + except Exception as exc2: + logger.warning("error ignored in rollback of %s: %s", self, exc2) + return False + + def _commit_gen(self) -> PQGen[None]: + ex = self._pop_savepoint("commit") + self._exited = True + if ex: + raise ex + + for command in self._get_commit_commands(): + yield from self._conn._exec_command(command) + + def _rollback_gen(self, exc_val: Optional[BaseException]) -> PQGen[bool]: + if isinstance(exc_val, Rollback): + logger.debug(f"{self._conn}: Explicit rollback from: ", exc_info=True) + + ex = self._pop_savepoint("rollback") + self._exited = True + if ex: + raise ex + + for command in self._get_rollback_commands(): + yield from self._conn._exec_command(command) + + if isinstance(exc_val, Rollback): + if not exc_val.transaction or exc_val.transaction is self: + return True # Swallow the exception + + return False + + def _get_enter_commands(self) -> Iterator[bytes]: + if self._outer_transaction: + yield self._conn._get_tx_start_command() + + if self._savepoint_name: + yield ( + sql.SQL("SAVEPOINT {}") + .format(sql.Identifier(self._savepoint_name)) + .as_bytes(self._conn) + ) + + def _get_commit_commands(self) -> Iterator[bytes]: + if self._savepoint_name and not self._outer_transaction: + yield ( + sql.SQL("RELEASE {}") + .format(sql.Identifier(self._savepoint_name)) + .as_bytes(self._conn) + ) + + if self._outer_transaction: + assert not self._conn._num_transactions + yield b"COMMIT" + + def _get_rollback_commands(self) -> Iterator[bytes]: + if self._savepoint_name and not self._outer_transaction: + yield ( + sql.SQL("ROLLBACK TO {n}") + .format(n=sql.Identifier(self._savepoint_name)) + .as_bytes(self._conn) + ) + yield ( + sql.SQL("RELEASE {n}") + .format(n=sql.Identifier(self._savepoint_name)) + .as_bytes(self._conn) + ) + + if self._outer_transaction: + assert not self._conn._num_transactions + yield b"ROLLBACK" + + # Also clear the prepared statements cache. 
+ if self._conn._prepared.clear(): + yield from self._conn._prepared.get_maintenance_commands() + + def _push_savepoint(self) -> None: + """ + Push the transaction on the connection transactions stack. + + Also set the internal state of the object and verify consistency. + """ + self._outer_transaction = self.pgconn.transaction_status == IDLE + if self._outer_transaction: + # outer transaction: if no name it's only a begin, else + # there will be an additional savepoint + assert not self._conn._num_transactions + else: + # inner transaction: it always has a name + if not self._savepoint_name: + self._savepoint_name = f"_pg3_{self._conn._num_transactions + 1}" + + self._stack_index = self._conn._num_transactions + self._conn._num_transactions += 1 + + def _pop_savepoint(self, action: str) -> Optional[Exception]: + """ + Pop the transaction from the connection transactions stack. + + Also verify the state consistency. + """ + self._conn._num_transactions -= 1 + if self._conn._num_transactions == self._stack_index: + return None + + return OutOfOrderTransactionNesting( + f"transaction {action} at the wrong nesting level: {self}" + ) + + +class Transaction(BaseTransaction["Connection[Any]"]): + """ + Returned by `Connection.transaction()` to handle a transaction block. 
+ """ + + __module__ = "psycopg" + + _Self = TypeVar("_Self", bound="Transaction") + + @property + def connection(self) -> "Connection[Any]": + """The connection the object is managing.""" + return self._conn + + def __enter__(self: _Self) -> _Self: + with self._conn.lock: + self._conn.wait(self._enter_gen()) + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> bool: + if self.pgconn.status == OK: + with self._conn.lock: + return self._conn.wait(self._exit_gen(exc_type, exc_val, exc_tb)) + else: + return False + + +class AsyncTransaction(BaseTransaction["AsyncConnection[Any]"]): + """ + Returned by `AsyncConnection.transaction()` to handle a transaction block. + """ + + __module__ = "psycopg" + + _Self = TypeVar("_Self", bound="AsyncTransaction") + + @property + def connection(self) -> "AsyncConnection[Any]": + return self._conn + + async def __aenter__(self: _Self) -> _Self: + async with self._conn.lock: + await self._conn.wait(self._enter_gen()) + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> bool: + if self.pgconn.status == OK: + async with self._conn.lock: + return await self._conn.wait(self._exit_gen(exc_type, exc_val, exc_tb)) + else: + return False diff --git a/lib/python3.11/site-packages/psycopg/types/__init__.py b/lib/python3.11/site-packages/psycopg/types/__init__.py new file mode 100644 index 0000000..bdddf05 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/types/__init__.py @@ -0,0 +1,11 @@ +""" +psycopg types package +""" + +# Copyright (C) 2020 The Psycopg Team + +from .. 
import _typeinfo + +# Exposed here +TypeInfo = _typeinfo.TypeInfo +TypesRegistry = _typeinfo.TypesRegistry diff --git a/lib/python3.11/site-packages/psycopg/types/__pycache__/__init__.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/types/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..36d71bc Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/types/__pycache__/__init__.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/types/__pycache__/array.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/types/__pycache__/array.cpython-311.pyc new file mode 100644 index 0000000..bf6bf8d Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/types/__pycache__/array.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/types/__pycache__/bool.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/types/__pycache__/bool.cpython-311.pyc new file mode 100644 index 0000000..680c4ce Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/types/__pycache__/bool.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/types/__pycache__/composite.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/types/__pycache__/composite.cpython-311.pyc new file mode 100644 index 0000000..2c55339 Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/types/__pycache__/composite.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/types/__pycache__/datetime.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/types/__pycache__/datetime.cpython-311.pyc new file mode 100644 index 0000000..b56db79 Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/types/__pycache__/datetime.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/types/__pycache__/enum.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/types/__pycache__/enum.cpython-311.pyc new file mode 100644 index 0000000..e0cfba6 Binary files /dev/null 
and b/lib/python3.11/site-packages/psycopg/types/__pycache__/enum.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/types/__pycache__/hstore.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/types/__pycache__/hstore.cpython-311.pyc new file mode 100644 index 0000000..8fa3096 Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/types/__pycache__/hstore.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/types/__pycache__/json.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/types/__pycache__/json.cpython-311.pyc new file mode 100644 index 0000000..f5bac43 Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/types/__pycache__/json.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/types/__pycache__/multirange.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/types/__pycache__/multirange.cpython-311.pyc new file mode 100644 index 0000000..f4a2a4a Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/types/__pycache__/multirange.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/types/__pycache__/net.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/types/__pycache__/net.cpython-311.pyc new file mode 100644 index 0000000..50cd46a Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/types/__pycache__/net.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/types/__pycache__/none.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/types/__pycache__/none.cpython-311.pyc new file mode 100644 index 0000000..8109f6e Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/types/__pycache__/none.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/types/__pycache__/numeric.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/types/__pycache__/numeric.cpython-311.pyc new file mode 100644 index 0000000..54c8d70 Binary files /dev/null and 
b/lib/python3.11/site-packages/psycopg/types/__pycache__/numeric.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/types/__pycache__/range.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/types/__pycache__/range.cpython-311.pyc new file mode 100644 index 0000000..80c0a7b Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/types/__pycache__/range.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/types/__pycache__/shapely.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/types/__pycache__/shapely.cpython-311.pyc new file mode 100644 index 0000000..9122e7b Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/types/__pycache__/shapely.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/types/__pycache__/string.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/types/__pycache__/string.cpython-311.pyc new file mode 100644 index 0000000..0d66873 Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/types/__pycache__/string.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/types/__pycache__/uuid.cpython-311.pyc b/lib/python3.11/site-packages/psycopg/types/__pycache__/uuid.cpython-311.pyc new file mode 100644 index 0000000..822f7cf Binary files /dev/null and b/lib/python3.11/site-packages/psycopg/types/__pycache__/uuid.cpython-311.pyc differ diff --git a/lib/python3.11/site-packages/psycopg/types/array.py b/lib/python3.11/site-packages/psycopg/types/array.py new file mode 100644 index 0000000..419e3d4 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/types/array.py @@ -0,0 +1,460 @@ +""" +Adapters for arrays +""" + +# Copyright (C) 2020 The Psycopg Team + +import re +import struct +from typing import Any, cast, Callable, List, Optional, Pattern, Set, Tuple, Type + +from .. import pq +from .. import errors as e +from .. 
import postgres +from ..abc import AdaptContext, Buffer, Dumper, DumperKey, NoneType, Loader, Transformer +from ..adapt import RecursiveDumper, RecursiveLoader, PyFormat +from .._compat import cache, prod +from .._struct import pack_len, unpack_len +from .._cmodule import _psycopg +from ..postgres import TEXT_OID, INVALID_OID +from .._typeinfo import TypeInfo + +_struct_head = struct.Struct("!III") # ndims, hasnull, elem oid +_pack_head = cast(Callable[[int, int, int], bytes], _struct_head.pack) +_unpack_head = cast(Callable[[Buffer], Tuple[int, int, int]], _struct_head.unpack_from) +_struct_dim = struct.Struct("!II") # dim, lower bound +_pack_dim = cast(Callable[[int, int], bytes], _struct_dim.pack) +_unpack_dim = cast(Callable[[Buffer, int], Tuple[int, int]], _struct_dim.unpack_from) + +TEXT_ARRAY_OID = postgres.types["text"].array_oid + +PY_TEXT = PyFormat.TEXT +PQ_BINARY = pq.Format.BINARY + + +class BaseListDumper(RecursiveDumper): + element_oid = 0 + + def __init__(self, cls: type, context: Optional[AdaptContext] = None): + if cls is NoneType: + cls = list + + super().__init__(cls, context) + self.sub_dumper: Optional[Dumper] = None + if self.element_oid and context: + sdclass = context.adapters.get_dumper_by_oid(self.element_oid, self.format) + self.sub_dumper = sdclass(NoneType, context) + + def _find_list_element(self, L: List[Any], format: PyFormat) -> Any: + """ + Find the first non-null element of an eventually nested list + """ + items = list(self._flatiter(L, set())) + types = {type(item): item for item in items} + if not types: + return None + + if len(types) == 1: + t, v = types.popitem() + else: + # More than one type in the list. It might be still good, as long + # as they dump with the same oid (e.g. IPv4Network, IPv6Network). 
+ dumpers = [self._tx.get_dumper(item, format) for item in types.values()] + oids = set(d.oid for d in dumpers) + if len(oids) == 1: + t, v = types.popitem() + else: + raise e.DataError( + "cannot dump lists of mixed types;" + f" got: {', '.join(sorted(t.__name__ for t in types))}" + ) + + # Checking for precise type. If the type is a subclass (e.g. Int4) + # we assume the user knows what type they are passing. + if t is not int: + return v + + # If we got an int, let's see what is the biggest one in order to + # choose the smallest OID and allow Postgres to do the right cast. + imax: int = max(items) + imin: int = min(items) + if imin >= 0: + return imax + else: + return max(imax, -imin - 1) + + def _flatiter(self, L: List[Any], seen: Set[int]) -> Any: + if id(L) in seen: + raise e.DataError("cannot dump a recursive list") + + seen.add(id(L)) + + for item in L: + if type(item) is list: + yield from self._flatiter(item, seen) + elif item is not None: + yield item + + return None + + def _get_base_type_info(self, base_oid: int) -> TypeInfo: + """ + Return info about the base type. + + Return text info as fallback. + """ + if base_oid: + info = self._tx.adapters.types.get(base_oid) + if info: + return info + + return self._tx.adapters.types["text"] + + +class ListDumper(BaseListDumper): + delimiter = b"," + + def get_key(self, obj: List[Any], format: PyFormat) -> DumperKey: + if self.oid: + return self.cls + + item = self._find_list_element(obj, format) + if item is None: + return self.cls + + sd = self._tx.get_dumper(item, format) + return (self.cls, sd.get_key(item, format)) + + def upgrade(self, obj: List[Any], format: PyFormat) -> "BaseListDumper": + # If we have an oid we don't need to upgrade + if self.oid: + return self + + item = self._find_list_element(obj, format) + if item is None: + # Empty lists can only be dumped as text if the type is unknown. 
+ return self + + sd = self._tx.get_dumper(item, PyFormat.from_pq(self.format)) + dumper = type(self)(self.cls, self._tx) + dumper.sub_dumper = sd + + # We consider an array of unknowns as unknown, so we can dump empty + # lists or lists containing only None elements. + if sd.oid != INVALID_OID: + info = self._get_base_type_info(sd.oid) + dumper.oid = info.array_oid or TEXT_ARRAY_OID + dumper.delimiter = info.delimiter.encode() + else: + dumper.oid = INVALID_OID + + return dumper + + # Double quotes and backslashes embedded in element values will be + # backslash-escaped. + _re_esc = re.compile(rb'(["\\])') + + def dump(self, obj: List[Any]) -> bytes: + tokens: List[Buffer] = [] + needs_quotes = _get_needs_quotes_regexp(self.delimiter).search + + def dump_list(obj: List[Any]) -> None: + if not obj: + tokens.append(b"{}") + return + + tokens.append(b"{") + for item in obj: + if isinstance(item, list): + dump_list(item) + elif item is not None: + ad = self._dump_item(item) + if needs_quotes(ad): + if not isinstance(ad, bytes): + ad = bytes(ad) + ad = b'"' + self._re_esc.sub(rb"\\\1", ad) + b'"' + tokens.append(ad) + else: + tokens.append(b"NULL") + + tokens.append(self.delimiter) + + tokens[-1] = b"}" + + dump_list(obj) + + return b"".join(tokens) + + def _dump_item(self, item: Any) -> Buffer: + if self.sub_dumper: + return self.sub_dumper.dump(item) + else: + return self._tx.get_dumper(item, PY_TEXT).dump(item) + + +@cache +def _get_needs_quotes_regexp(delimiter: bytes) -> Pattern[bytes]: + """Return a regexp to recognise when a value needs quotes + + from https://www.postgresql.org/docs/current/arrays.html#ARRAYS-IO + + The array output routine will put double quotes around element values if + they are empty strings, contain curly braces, delimiter characters, + double quotes, backslashes, or white space, or match the word NULL. 
+ """ + return re.compile( + rb"""(?xi) + ^$ # the empty string + | ["{}%s\\\s] # or a char to escape + | ^null$ # or the word NULL + """ + % delimiter + ) + + +class ListBinaryDumper(BaseListDumper): + format = pq.Format.BINARY + + def get_key(self, obj: List[Any], format: PyFormat) -> DumperKey: + if self.oid: + return self.cls + + item = self._find_list_element(obj, format) + if item is None: + return (self.cls,) + + sd = self._tx.get_dumper(item, format) + return (self.cls, sd.get_key(item, format)) + + def upgrade(self, obj: List[Any], format: PyFormat) -> "BaseListDumper": + # If we have an oid we don't need to upgrade + if self.oid: + return self + + item = self._find_list_element(obj, format) + if item is None: + return ListDumper(self.cls, self._tx) + + sd = self._tx.get_dumper(item, format.from_pq(self.format)) + dumper = type(self)(self.cls, self._tx) + dumper.sub_dumper = sd + info = self._get_base_type_info(sd.oid) + dumper.oid = info.array_oid or TEXT_ARRAY_OID + + return dumper + + def dump(self, obj: List[Any]) -> bytes: + # Postgres won't take unknown for element oid: fall back on text + sub_oid = self.sub_dumper and self.sub_dumper.oid or TEXT_OID + + if not obj: + return _pack_head(0, 0, sub_oid) + + data: List[Buffer] = [b"", b""] # placeholders to avoid a resize + dims: List[int] = [] + hasnull = 0 + + def calc_dims(L: List[Any]) -> None: + if isinstance(L, self.cls): + if not L: + raise e.DataError("lists cannot contain empty lists") + dims.append(len(L)) + calc_dims(L[0]) + + calc_dims(obj) + + def dump_list(L: List[Any], dim: int) -> None: + nonlocal hasnull + if len(L) != dims[dim]: + raise e.DataError("nested lists have inconsistent lengths") + + if dim == len(dims) - 1: + for item in L: + if item is not None: + # If we get here, the sub_dumper must have been set + ad = self.sub_dumper.dump(item) # type: ignore[union-attr] + data.append(pack_len(len(ad))) + data.append(ad) + else: + hasnull = 1 + data.append(b"\xff\xff\xff\xff") + else: + 
for item in L: + if not isinstance(item, self.cls): + raise e.DataError("nested lists have inconsistent depths") + dump_list(item, dim + 1) # type: ignore + + dump_list(obj, 0) + + data[0] = _pack_head(len(dims), hasnull, sub_oid) + data[1] = b"".join(_pack_dim(dim, 1) for dim in dims) + return b"".join(data) + + +class ArrayLoader(RecursiveLoader): + delimiter = b"," + base_oid: int + + def load(self, data: Buffer) -> List[Any]: + loader = self._tx.get_loader(self.base_oid, self.format) + return _load_text(data, loader, self.delimiter) + + +class ArrayBinaryLoader(RecursiveLoader): + format = pq.Format.BINARY + + def load(self, data: Buffer) -> List[Any]: + return _load_binary(data, self._tx) + + +def register_array(info: TypeInfo, context: Optional[AdaptContext] = None) -> None: + if not info.array_oid: + raise ValueError(f"the type info {info} doesn't describe an array") + + base: Type[Any] + adapters = context.adapters if context else postgres.adapters + + base = getattr(_psycopg, "ArrayLoader", ArrayLoader) + name = f"{info.name.title()}{base.__name__}" + attribs = { + "base_oid": info.oid, + "delimiter": info.delimiter.encode(), + } + loader = type(name, (base,), attribs) + adapters.register_loader(info.array_oid, loader) + + loader = getattr(_psycopg, "ArrayBinaryLoader", ArrayBinaryLoader) + adapters.register_loader(info.array_oid, loader) + + base = ListDumper + name = f"{info.name.title()}{base.__name__}" + attribs = { + "oid": info.array_oid, + "element_oid": info.oid, + "delimiter": info.delimiter.encode(), + } + dumper = type(name, (base,), attribs) + adapters.register_dumper(None, dumper) + + base = ListBinaryDumper + name = f"{info.name.title()}{base.__name__}" + attribs = { + "oid": info.array_oid, + "element_oid": info.oid, + } + dumper = type(name, (base,), attribs) + adapters.register_dumper(None, dumper) + + +def register_default_adapters(context: AdaptContext) -> None: + # The text dumper is more flexible as it can handle lists of mixed type, + 
# so register it later. + context.adapters.register_dumper(list, ListBinaryDumper) + context.adapters.register_dumper(list, ListDumper) + + +def register_all_arrays(context: AdaptContext) -> None: + """ + Associate the array oid of all the types in Loader.globals. + + This function is designed to be called once at import time, after having + registered all the base loaders. + """ + for t in context.adapters.types: + if t.array_oid: + t.register(context) + + +def _load_text( + data: Buffer, + loader: Loader, + delimiter: bytes = b",", + __re_unescape: Pattern[bytes] = re.compile(rb"\\(.)"), +) -> List[Any]: + rv = None + stack: List[Any] = [] + a: List[Any] = [] + rv = a + load = loader.load + + # Remove the dimensions information prefix (``[...]=``) + if data and data[0] == b"["[0]: + if isinstance(data, memoryview): + data = bytes(data) + idx = data.find(b"=") + if idx == -1: + raise e.DataError("malformed array: no '=' after dimension information") + data = data[idx + 1 :] + + re_parse = _get_array_parse_regexp(delimiter) + for m in re_parse.finditer(data): + t = m.group(1) + if t == b"{": + if stack: + stack[-1].append(a) + stack.append(a) + a = [] + + elif t == b"}": + if not stack: + raise e.DataError("malformed array: unexpected '}'") + rv = stack.pop() + + else: + if not stack: + wat = t[:10].decode("utf8", "replace") + "..." if len(t) > 10 else "" + raise e.DataError(f"malformed array: unexpected '{wat}'") + if t == b"NULL": + v = None + else: + if t.startswith(b'"'): + t = __re_unescape.sub(rb"\1", t[1:-1]) + v = load(t) + + stack[-1].append(v) + + assert rv is not None + return rv + + +@cache +def _get_array_parse_regexp(delimiter: bytes) -> Pattern[bytes]: + """ + Return a regexp to tokenize an array representation into item and brackets + """ + return re.compile( + rb"""(?xi) + ( [{}] # open or closed bracket + | " (?: [^"\\] | \\. )* " # or a quoted string + | [^"{}%s\\]+ # or an unquoted non-empty string + ) ,? 
+ """ + % delimiter + ) + + +def _load_binary(data: Buffer, tx: Transformer) -> List[Any]: + ndims, hasnull, oid = _unpack_head(data) + load = tx.get_loader(oid, PQ_BINARY).load + + if not ndims: + return [] + + p = 12 + 8 * ndims + dims = [_unpack_dim(data, i)[0] for i in range(12, p, 8)] + nelems = prod(dims) + + out: List[Any] = [None] * nelems + for i in range(nelems): + size = unpack_len(data, p)[0] + p += 4 + if size == -1: + continue + out[i] = load(data[p : p + size]) + p += size + + # fon ndims > 1 we have to aggregate the array into sub-arrays + for dim in dims[-1:0:-1]: + out = [out[i : i + dim] for i in range(0, len(out), dim)] + + return out diff --git a/lib/python3.11/site-packages/psycopg/types/bool.py b/lib/python3.11/site-packages/psycopg/types/bool.py new file mode 100644 index 0000000..e259a11 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/types/bool.py @@ -0,0 +1,48 @@ +""" +Adapters for booleans. +""" + +# Copyright (C) 2020 The Psycopg Team + +from .. import postgres +from ..pq import Format +from ..abc import AdaptContext +from ..adapt import Buffer, Dumper, Loader + + +class BoolDumper(Dumper): + oid = postgres.types["bool"].oid + + def dump(self, obj: bool) -> bytes: + return b"t" if obj else b"f" + + def quote(self, obj: bool) -> bytes: + return b"true" if obj else b"false" + + +class BoolBinaryDumper(Dumper): + format = Format.BINARY + oid = postgres.types["bool"].oid + + def dump(self, obj: bool) -> bytes: + return b"\x01" if obj else b"\x00" + + +class BoolLoader(Loader): + def load(self, data: Buffer) -> bool: + return data == b"t" + + +class BoolBinaryLoader(Loader): + format = Format.BINARY + + def load(self, data: Buffer) -> bool: + return data != b"\x00" + + +def register_default_adapters(context: AdaptContext) -> None: + adapters = context.adapters + adapters.register_dumper(bool, BoolDumper) + adapters.register_dumper(bool, BoolBinaryDumper) + adapters.register_loader("bool", BoolLoader) + 
adapters.register_loader("bool", BoolBinaryLoader) diff --git a/lib/python3.11/site-packages/psycopg/types/composite.py b/lib/python3.11/site-packages/psycopg/types/composite.py new file mode 100644 index 0000000..40a1e17 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/types/composite.py @@ -0,0 +1,292 @@ +""" +Support for composite types adaptation. +""" + +# Copyright (C) 2020 The Psycopg Team + +import re +import struct +from collections import namedtuple +from typing import Any, Callable, cast, Dict, Iterator, List, Optional +from typing import Sequence, Tuple, Type + +from .. import pq +from .. import abc +from .. import postgres +from ..adapt import Transformer, PyFormat, RecursiveDumper, Loader, Dumper +from .._struct import pack_len, unpack_len +from ..postgres import TEXT_OID +from .._typeinfo import CompositeInfo as CompositeInfo # exported here +from .._encodings import _as_python_identifier + +_struct_oidlen = struct.Struct("!Ii") +_pack_oidlen = cast(Callable[[int, int], bytes], _struct_oidlen.pack) +_unpack_oidlen = cast( + Callable[[abc.Buffer, int], Tuple[int, int]], _struct_oidlen.unpack_from +) + + +class SequenceDumper(RecursiveDumper): + def _dump_sequence( + self, obj: Sequence[Any], start: bytes, end: bytes, sep: bytes + ) -> bytes: + if not obj: + return start + end + + parts: List[abc.Buffer] = [start] + + for item in obj: + if item is None: + parts.append(sep) + continue + + dumper = self._tx.get_dumper(item, PyFormat.from_pq(self.format)) + ad = dumper.dump(item) + if not ad: + ad = b'""' + elif self._re_needs_quotes.search(ad): + ad = b'"' + self._re_esc.sub(rb"\1\1", ad) + b'"' + + parts.append(ad) + parts.append(sep) + + parts[-1] = end + + return b"".join(parts) + + _re_needs_quotes = re.compile(rb'[",\\\s()]') + _re_esc = re.compile(rb"([\\\"])") + + +class TupleDumper(SequenceDumper): + # Should be this, but it doesn't work + # oid = postgres_types["record"].oid + + def dump(self, obj: Tuple[Any, ...]) -> bytes: + return 
self._dump_sequence(obj, b"(", b")", b",") + + +class TupleBinaryDumper(Dumper): + format = pq.Format.BINARY + + # Subclasses must set an info + info: CompositeInfo + + def __init__(self, cls: type, context: Optional[abc.AdaptContext] = None): + super().__init__(cls, context) + + # Note: this class is not a RecursiveDumper because it would use the + # same Transformer of the context, which would confuse dump_sequence() + # in case the composite contains another composite. Make sure to use + # a separate Transformer instance instead. + self._tx = Transformer(context) + self._tx.set_dumper_types(self.info.field_types, self.format) + + nfields = len(self.info.field_types) + self._formats = (PyFormat.from_pq(self.format),) * nfields + + def dump(self, obj: Tuple[Any, ...]) -> bytearray: + out = bytearray(pack_len(len(obj))) + adapted = self._tx.dump_sequence(obj, self._formats) + for i in range(len(obj)): + b = adapted[i] + oid = self.info.field_types[i] + if b is not None: + out += _pack_oidlen(oid, len(b)) + out += b + else: + out += _pack_oidlen(oid, -1) + + return out + + +class BaseCompositeLoader(Loader): + def __init__(self, oid: int, context: Optional[abc.AdaptContext] = None): + super().__init__(oid, context) + self._tx = Transformer(context) + + def _parse_record(self, data: abc.Buffer) -> Iterator[Optional[bytes]]: + """ + Split a non-empty representation of a composite type into components. + + Terminators shouldn't be used in `!data` (so that both record and range + representations can be parsed). + """ + for m in self._re_tokenize.finditer(data): + if m.group(1): + yield None + elif m.group(2) is not None: + yield self._re_undouble.sub(rb"\1", m.group(2)) + else: + yield m.group(3) + + # If the final group ended in `,` there is a final NULL in the record + # that the regexp couldn't parse. + if m and m.group().endswith(b","): + yield None + + _re_tokenize = re.compile( + rb"""(?x) + (,) # an empty token, representing NULL + | " ((?: [^"] | "")*) " ,? 
# or a quoted string + | ([^",)]+) ,? # or an unquoted string + """ + ) + + _re_undouble = re.compile(rb'(["\\])\1') + + +class RecordLoader(BaseCompositeLoader): + def load(self, data: abc.Buffer) -> Tuple[Any, ...]: + if data == b"()": + return () + + cast = self._tx.get_loader(TEXT_OID, self.format).load + return tuple( + cast(token) if token is not None else None + for token in self._parse_record(data[1:-1]) + ) + + +class RecordBinaryLoader(Loader): + format = pq.Format.BINARY + + def __init__(self, oid: int, context: Optional[abc.AdaptContext] = None): + super().__init__(oid, context) + self._ctx = context + # Cache a transformer for each sequence of oid found. + # Usually there will be only one, but if there is more than one + # row in the same query (in different columns, or even in different + # records), oids might differ and we'd need separate transformers. + self._txs: Dict[Tuple[int, ...], abc.Transformer] = {} + + def load(self, data: abc.Buffer) -> Tuple[Any, ...]: + nfields = unpack_len(data, 0)[0] + offset = 4 + oids = [] + record = [] + for _ in range(nfields): + oid, length = _unpack_oidlen(data, offset) + offset += 8 + record.append(data[offset : offset + length] if length != -1 else None) + oids.append(oid) + if length >= 0: + offset += length + + key = tuple(oids) + try: + tx = self._txs[key] + except KeyError: + tx = self._txs[key] = Transformer(self._ctx) + tx.set_loader_types(oids, self.format) + + return tx.load_sequence(tuple(record)) + + +class CompositeLoader(RecordLoader): + factory: Callable[..., Any] + fields_types: List[int] + _types_set = False + + def load(self, data: abc.Buffer) -> Any: + if not self._types_set: + self._config_types(data) + self._types_set = True + + if data == b"()": + return type(self).factory() + + return type(self).factory( + *self._tx.load_sequence(tuple(self._parse_record(data[1:-1]))) + ) + + def _config_types(self, data: abc.Buffer) -> None: + self._tx.set_loader_types(self.fields_types, self.format) + + 
+class CompositeBinaryLoader(RecordBinaryLoader): + format = pq.Format.BINARY + factory: Callable[..., Any] + + def load(self, data: abc.Buffer) -> Any: + r = super().load(data) + return type(self).factory(*r) + + +def register_composite( + info: CompositeInfo, + context: Optional[abc.AdaptContext] = None, + factory: Optional[Callable[..., Any]] = None, +) -> None: + """Register the adapters to load and dump a composite type. + + :param info: The object with the information about the composite to register. + :param context: The context where to register the adapters. If `!None`, + register it globally. + :param factory: Callable to convert the sequence of attributes read from + the composite into a Python object. + + .. note:: + + Registering the adapters doesn't affect objects already created, even + if they are children of the registered context. For instance, + registering the adapter globally doesn't affect already existing + connections. + """ + + # A friendly error warning instead of an AttributeError in case fetch() + # failed and it wasn't noticed. + if not info: + raise TypeError("no info passed. 
Is the requested composite available?") + + # Register arrays and type info + info.register(context) + + if not factory: + factory = namedtuple( # type: ignore + _as_python_identifier(info.name), + [_as_python_identifier(n) for n in info.field_names], + ) + + adapters = context.adapters if context else postgres.adapters + + # generate and register a customized text loader + loader: Type[BaseCompositeLoader] = type( + f"{info.name.title()}Loader", + (CompositeLoader,), + { + "factory": factory, + "fields_types": info.field_types, + }, + ) + adapters.register_loader(info.oid, loader) + + # generate and register a customized binary loader + loader = type( + f"{info.name.title()}BinaryLoader", + (CompositeBinaryLoader,), + {"factory": factory}, + ) + adapters.register_loader(info.oid, loader) + + # If the factory is a type, create and register dumpers for it + if isinstance(factory, type): + dumper = type( + f"{info.name.title()}BinaryDumper", + (TupleBinaryDumper,), + {"oid": info.oid, "info": info}, + ) + adapters.register_dumper(factory, dumper) + + # Default to the text dumper because it is more flexible + dumper = type(f"{info.name.title()}Dumper", (TupleDumper,), {"oid": info.oid}) + adapters.register_dumper(factory, dumper) + + info.python_type = factory + + +def register_default_adapters(context: abc.AdaptContext) -> None: + adapters = context.adapters + adapters.register_dumper(tuple, TupleDumper) + adapters.register_loader("record", RecordLoader) + adapters.register_loader("record", RecordBinaryLoader) diff --git a/lib/python3.11/site-packages/psycopg/types/datetime.py b/lib/python3.11/site-packages/psycopg/types/datetime.py new file mode 100644 index 0000000..3fc4356 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/types/datetime.py @@ -0,0 +1,730 @@ +""" +Adapters for date/time types. 
+""" + +# Copyright (C) 2020 The Psycopg Team + +import re +import struct +from datetime import date, datetime, time, timedelta, timezone +from typing import Any, Callable, cast, Optional, Tuple, TYPE_CHECKING + +from .. import postgres +from ..pq import Format +from .._tz import get_tzinfo +from ..abc import AdaptContext, DumperKey +from ..adapt import Buffer, Dumper, Loader, PyFormat +from ..errors import InterfaceError, DataError +from .._struct import pack_int4, pack_int8, unpack_int4, unpack_int8 + +if TYPE_CHECKING: + from ..connection import BaseConnection + +_struct_timetz = struct.Struct("!qi") # microseconds, sec tz offset +_pack_timetz = cast(Callable[[int, int], bytes], _struct_timetz.pack) +_unpack_timetz = cast(Callable[[Buffer], Tuple[int, int]], _struct_timetz.unpack) + +_struct_interval = struct.Struct("!qii") # microseconds, days, months +_pack_interval = cast(Callable[[int, int, int], bytes], _struct_interval.pack) +_unpack_interval = cast( + Callable[[Buffer], Tuple[int, int, int]], _struct_interval.unpack +) + +utc = timezone.utc +_pg_date_epoch_days = date(2000, 1, 1).toordinal() +_pg_datetime_epoch = datetime(2000, 1, 1) +_pg_datetimetz_epoch = datetime(2000, 1, 1, tzinfo=utc) +_py_date_min_days = date.min.toordinal() + + +class DateDumper(Dumper): + oid = postgres.types["date"].oid + + def dump(self, obj: date) -> bytes: + # NOTE: whatever the PostgreSQL DateStyle input format (DMY, MDY, YMD) + # the YYYY-MM-DD is always understood correctly. + return str(obj).encode() + + +class DateBinaryDumper(Dumper): + format = Format.BINARY + oid = postgres.types["date"].oid + + def dump(self, obj: date) -> bytes: + days = obj.toordinal() - _pg_date_epoch_days + return pack_int4(days) + + +class _BaseTimeDumper(Dumper): + def get_key(self, obj: time, format: PyFormat) -> DumperKey: + # Use (cls,) to report the need to upgrade to a dumper for timetz (the + # Frankenstein of the data types). 
+ if not obj.tzinfo: + return self.cls + else: + return (self.cls,) + + def upgrade(self, obj: time, format: PyFormat) -> Dumper: + raise NotImplementedError + + +class _BaseTimeTextDumper(_BaseTimeDumper): + def dump(self, obj: time) -> bytes: + return str(obj).encode() + + +class TimeDumper(_BaseTimeTextDumper): + oid = postgres.types["time"].oid + + def upgrade(self, obj: time, format: PyFormat) -> Dumper: + if not obj.tzinfo: + return self + else: + return TimeTzDumper(self.cls) + + +class TimeTzDumper(_BaseTimeTextDumper): + oid = postgres.types["timetz"].oid + + +class TimeBinaryDumper(_BaseTimeDumper): + format = Format.BINARY + oid = postgres.types["time"].oid + + def dump(self, obj: time) -> bytes: + us = obj.microsecond + 1_000_000 * ( + obj.second + 60 * (obj.minute + 60 * obj.hour) + ) + return pack_int8(us) + + def upgrade(self, obj: time, format: PyFormat) -> Dumper: + if not obj.tzinfo: + return self + else: + return TimeTzBinaryDumper(self.cls) + + +class TimeTzBinaryDumper(_BaseTimeDumper): + format = Format.BINARY + oid = postgres.types["timetz"].oid + + def dump(self, obj: time) -> bytes: + us = obj.microsecond + 1_000_000 * ( + obj.second + 60 * (obj.minute + 60 * obj.hour) + ) + off = obj.utcoffset() + assert off is not None + return _pack_timetz(us, -int(off.total_seconds())) + + +class _BaseDatetimeDumper(Dumper): + def get_key(self, obj: datetime, format: PyFormat) -> DumperKey: + # Use (cls,) to report the need to upgrade (downgrade, actually) to a + # dumper for naive timestamp. + if obj.tzinfo: + return self.cls + else: + return (self.cls,) + + def upgrade(self, obj: datetime, format: PyFormat) -> Dumper: + raise NotImplementedError + + +class _BaseDatetimeTextDumper(_BaseDatetimeDumper): + def dump(self, obj: datetime) -> bytes: + # NOTE: whatever the PostgreSQL DateStyle input format (DMY, MDY, YMD) + # the YYYY-MM-DD is always understood correctly. 
+ return str(obj).encode() + + +class DatetimeDumper(_BaseDatetimeTextDumper): + oid = postgres.types["timestamptz"].oid + + def upgrade(self, obj: datetime, format: PyFormat) -> Dumper: + if obj.tzinfo: + return self + else: + return DatetimeNoTzDumper(self.cls) + + +class DatetimeNoTzDumper(_BaseDatetimeTextDumper): + oid = postgres.types["timestamp"].oid + + +class DatetimeBinaryDumper(_BaseDatetimeDumper): + format = Format.BINARY + oid = postgres.types["timestamptz"].oid + + def dump(self, obj: datetime) -> bytes: + delta = obj - _pg_datetimetz_epoch + micros = delta.microseconds + 1_000_000 * (86_400 * delta.days + delta.seconds) + return pack_int8(micros) + + def upgrade(self, obj: datetime, format: PyFormat) -> Dumper: + if obj.tzinfo: + return self + else: + return DatetimeNoTzBinaryDumper(self.cls) + + +class DatetimeNoTzBinaryDumper(_BaseDatetimeDumper): + format = Format.BINARY + oid = postgres.types["timestamp"].oid + + def dump(self, obj: datetime) -> bytes: + delta = obj - _pg_datetime_epoch + micros = delta.microseconds + 1_000_000 * (86_400 * delta.days + delta.seconds) + return pack_int8(micros) + + +class TimedeltaDumper(Dumper): + oid = postgres.types["interval"].oid + + def __init__(self, cls: type, context: Optional[AdaptContext] = None): + super().__init__(cls, context) + if self.connection: + if ( + self.connection.pgconn.parameter_status(b"IntervalStyle") + == b"sql_standard" + ): + setattr(self, "dump", self._dump_sql) + + def dump(self, obj: timedelta) -> bytes: + # The comma is parsed ok by PostgreSQL but it's not documented + # and it seems brittle to rely on it. CRDB doesn't consume it well. 
+ return str(obj).encode().replace(b",", b"") + + def _dump_sql(self, obj: timedelta) -> bytes: + # sql_standard format needs explicit signs + # otherwise -1 day 1 sec will mean -1 sec + return b"%+d day %+d second %+d microsecond" % ( + obj.days, + obj.seconds, + obj.microseconds, + ) + + +class TimedeltaBinaryDumper(Dumper): + format = Format.BINARY + oid = postgres.types["interval"].oid + + def dump(self, obj: timedelta) -> bytes: + micros = 1_000_000 * obj.seconds + obj.microseconds + return _pack_interval(micros, obj.days, 0) + + +class DateLoader(Loader): + _ORDER_YMD = 0 + _ORDER_DMY = 1 + _ORDER_MDY = 2 + + def __init__(self, oid: int, context: Optional[AdaptContext] = None): + super().__init__(oid, context) + ds = _get_datestyle(self.connection) + if ds.startswith(b"I"): # ISO + self._order = self._ORDER_YMD + elif ds.startswith(b"G"): # German + self._order = self._ORDER_DMY + elif ds.startswith(b"S") or ds.startswith(b"P"): # SQL or Postgres + self._order = self._ORDER_DMY if ds.endswith(b"DMY") else self._ORDER_MDY + else: + raise InterfaceError(f"unexpected DateStyle: {ds.decode('ascii')}") + + def load(self, data: Buffer) -> date: + if self._order == self._ORDER_YMD: + ye = data[:4] + mo = data[5:7] + da = data[8:] + elif self._order == self._ORDER_DMY: + da = data[:2] + mo = data[3:5] + ye = data[6:] + else: + mo = data[:2] + da = data[3:5] + ye = data[6:] + + try: + return date(int(ye), int(mo), int(da)) + except ValueError as ex: + s = bytes(data).decode("utf8", "replace") + if s == "infinity" or (s and len(s.split()[0]) > 10): + raise DataError(f"date too large (after year 10K): {s!r}") from None + elif s == "-infinity" or "BC" in s: + raise DataError(f"date too small (before year 1): {s!r}") from None + else: + raise DataError(f"can't parse date {s!r}: {ex}") from None + + +class DateBinaryLoader(Loader): + format = Format.BINARY + + def load(self, data: Buffer) -> date: + days = unpack_int4(data)[0] + _pg_date_epoch_days + try: + return 
date.fromordinal(days) + except (ValueError, OverflowError): + if days < _py_date_min_days: + raise DataError("date too small (before year 1)") from None + else: + raise DataError("date too large (after year 10K)") from None + + +class TimeLoader(Loader): + _re_format = re.compile(rb"^(\d+):(\d+):(\d+)(?:\.(\d+))?") + + def load(self, data: Buffer) -> time: + m = self._re_format.match(data) + if not m: + s = bytes(data).decode("utf8", "replace") + raise DataError(f"can't parse time {s!r}") + + ho, mi, se, fr = m.groups() + + # Pad the fraction of second to get micros + if fr: + us = int(fr) + if len(fr) < 6: + us *= _uspad[len(fr)] + else: + us = 0 + + try: + return time(int(ho), int(mi), int(se), us) + except ValueError as e: + s = bytes(data).decode("utf8", "replace") + raise DataError(f"can't parse time {s!r}: {e}") from None + + +class TimeBinaryLoader(Loader): + format = Format.BINARY + + def load(self, data: Buffer) -> time: + val = unpack_int8(data)[0] + val, us = divmod(val, 1_000_000) + val, s = divmod(val, 60) + h, m = divmod(val, 60) + try: + return time(h, m, s, us) + except ValueError: + raise DataError(f"time not supported by Python: hour={h}") from None + + +class TimetzLoader(Loader): + _re_format = re.compile( + rb"""(?ix) + ^ + (\d+) : (\d+) : (\d+) (?: \. (\d+) )? # Time and micros + ([-+]) (\d+) (?: : (\d+) )? (?: : (\d+) )? 
# Timezone + $ + """ + ) + + def load(self, data: Buffer) -> time: + m = self._re_format.match(data) + if not m: + s = bytes(data).decode("utf8", "replace") + raise DataError(f"can't parse timetz {s!r}") + + ho, mi, se, fr, sgn, oh, om, os = m.groups() + + # Pad the fraction of second to get the micros + if fr: + us = int(fr) + if len(fr) < 6: + us *= _uspad[len(fr)] + else: + us = 0 + + # Calculate timezone + off = 60 * 60 * int(oh) + if om: + off += 60 * int(om) + if os: + off += int(os) + tz = timezone(timedelta(0, off if sgn == b"+" else -off)) + + try: + return time(int(ho), int(mi), int(se), us, tz) + except ValueError as e: + s = bytes(data).decode("utf8", "replace") + raise DataError(f"can't parse timetz {s!r}: {e}") from None + + +class TimetzBinaryLoader(Loader): + format = Format.BINARY + + def load(self, data: Buffer) -> time: + val, off = _unpack_timetz(data) + + val, us = divmod(val, 1_000_000) + val, s = divmod(val, 60) + h, m = divmod(val, 60) + + try: + return time(h, m, s, us, timezone(timedelta(seconds=-off))) + except ValueError: + raise DataError(f"time not supported by Python: hour={h}") from None + + +class TimestampLoader(Loader): + _re_format = re.compile( + rb"""(?ix) + ^ + (\d+) [^a-z0-9] (\d+) [^a-z0-9] (\d+) # Date + (?: T | [^a-z0-9] ) # Separator, including T + (\d+) [^a-z0-9] (\d+) [^a-z0-9] (\d+) # Time + (?: \.(\d+) )? # Micros + $ + """ + ) + _re_format_pg = re.compile( + rb"""(?ix) + ^ + [a-z]+ [^a-z0-9] # DoW, separator + (\d+|[a-z]+) [^a-z0-9] # Month or day + (\d+|[a-z]+) [^a-z0-9] # Month or day + (\d+) [^a-z0-9] (\d+) [^a-z0-9] (\d+) # Time + (?: \.(\d+) )? 
# Micros + [^a-z0-9] (\d+) # Year + $ + """ + ) + + _ORDER_YMD = 0 + _ORDER_DMY = 1 + _ORDER_MDY = 2 + _ORDER_PGDM = 3 + _ORDER_PGMD = 4 + + def __init__(self, oid: int, context: Optional[AdaptContext] = None): + super().__init__(oid, context) + + ds = _get_datestyle(self.connection) + if ds.startswith(b"I"): # ISO + self._order = self._ORDER_YMD + elif ds.startswith(b"G"): # German + self._order = self._ORDER_DMY + elif ds.startswith(b"S"): # SQL + self._order = self._ORDER_DMY if ds.endswith(b"DMY") else self._ORDER_MDY + elif ds.startswith(b"P"): # Postgres + self._order = self._ORDER_PGDM if ds.endswith(b"DMY") else self._ORDER_PGMD + self._re_format = self._re_format_pg + else: + raise InterfaceError(f"unexpected DateStyle: {ds.decode('ascii')}") + + def load(self, data: Buffer) -> datetime: + m = self._re_format.match(data) + if not m: + raise _get_timestamp_load_error(self.connection, data) from None + + if self._order == self._ORDER_YMD: + ye, mo, da, ho, mi, se, fr = m.groups() + imo = int(mo) + elif self._order == self._ORDER_DMY: + da, mo, ye, ho, mi, se, fr = m.groups() + imo = int(mo) + elif self._order == self._ORDER_MDY: + mo, da, ye, ho, mi, se, fr = m.groups() + imo = int(mo) + else: + if self._order == self._ORDER_PGDM: + da, mo, ho, mi, se, fr, ye = m.groups() + else: + mo, da, ho, mi, se, fr, ye = m.groups() + try: + imo = _month_abbr[mo] + except KeyError: + s = mo.decode("utf8", "replace") + raise DataError(f"can't parse month: {s!r}") from None + + # Pad the fraction of second to get the micros + if fr: + us = int(fr) + if len(fr) < 6: + us *= _uspad[len(fr)] + else: + us = 0 + + try: + return datetime(int(ye), imo, int(da), int(ho), int(mi), int(se), us) + except ValueError as ex: + raise _get_timestamp_load_error(self.connection, data, ex) from None + + +class TimestampBinaryLoader(Loader): + format = Format.BINARY + + def load(self, data: Buffer) -> datetime: + micros = unpack_int8(data)[0] + try: + return _pg_datetime_epoch + 
timedelta(microseconds=micros) + except OverflowError: + if micros <= 0: + raise DataError("timestamp too small (before year 1)") from None + else: + raise DataError("timestamp too large (after year 10K)") from None + + +class TimestamptzLoader(Loader): + _re_format = re.compile( + rb"""(?ix) + ^ + (\d+) [^a-z0-9] (\d+) [^a-z0-9] (\d+) # Date + (?: T | [^a-z0-9] ) # Separator, including T + (\d+) [^a-z0-9] (\d+) [^a-z0-9] (\d+) # Time + (?: \.(\d+) )? # Micros + ([-+]) (\d+) (?: : (\d+) )? (?: : (\d+) )? # Timezone + $ + """ + ) + + def __init__(self, oid: int, context: Optional[AdaptContext] = None): + super().__init__(oid, context) + self._timezone = get_tzinfo(self.connection.pgconn if self.connection else None) + + ds = _get_datestyle(self.connection) + if not ds.startswith(b"I"): # not ISO + setattr(self, "load", self._load_notimpl) + + def load(self, data: Buffer) -> datetime: + m = self._re_format.match(data) + if not m: + raise _get_timestamp_load_error(self.connection, data) from None + + ye, mo, da, ho, mi, se, fr, sgn, oh, om, os = m.groups() + + # Pad the fraction of second to get the micros + if fr: + us = int(fr) + if len(fr) < 6: + us *= _uspad[len(fr)] + else: + us = 0 + + # Calculate timezone offset + soff = 60 * 60 * int(oh) + if om: + soff += 60 * int(om) + if os: + soff += int(os) + tzoff = timedelta(0, soff if sgn == b"+" else -soff) + + # The return value is a datetime with the timezone of the connection + # (in order to be consistent with the binary loader, which is the only + # thing it can return). So create a temporary datetime object, in utc, + # shift it by the offset parsed from the timestamp, and then move it to + # the connection timezone. 
+ dt = None + ex: Exception + try: + dt = datetime(int(ye), int(mo), int(da), int(ho), int(mi), int(se), us, utc) + return (dt - tzoff).astimezone(self._timezone) + except OverflowError as e: + # If we have created the temporary 'dt' it means that we have a + # datetime close to max, the shift pushed it past max, overflowing. + # In this case return the datetime in a fixed offset timezone. + if dt is not None: + return dt.replace(tzinfo=timezone(tzoff)) + else: + ex = e + except ValueError as e: + ex = e + + raise _get_timestamp_load_error(self.connection, data, ex) from None + + def _load_notimpl(self, data: Buffer) -> datetime: + s = bytes(data).decode("utf8", "replace") + ds = _get_datestyle(self.connection).decode("ascii") + raise NotImplementedError( + f"can't parse timestamptz with DateStyle {ds!r}: {s!r}" + ) + + +class TimestamptzBinaryLoader(Loader): + format = Format.BINARY + + def __init__(self, oid: int, context: Optional[AdaptContext] = None): + super().__init__(oid, context) + self._timezone = get_tzinfo(self.connection.pgconn if self.connection else None) + + def load(self, data: Buffer) -> datetime: + micros = unpack_int8(data)[0] + try: + ts = _pg_datetimetz_epoch + timedelta(microseconds=micros) + return ts.astimezone(self._timezone) + except OverflowError: + # If we were asked about a timestamp which would overflow in UTC, + # but not in the desired timezone (e.g. datetime.max at Chicago + # timezone) we can still save the day by shifting the value by the + # timezone offset and then replacing the timezone. 
+ if self._timezone: + utcoff = self._timezone.utcoffset( + datetime.min if micros < 0 else datetime.max + ) + if utcoff: + usoff = 1_000_000 * int(utcoff.total_seconds()) + try: + ts = _pg_datetime_epoch + timedelta(microseconds=micros + usoff) + except OverflowError: + pass # will raise downstream + else: + return ts.replace(tzinfo=self._timezone) + + if micros <= 0: + raise DataError("timestamp too small (before year 1)") from None + else: + raise DataError("timestamp too large (after year 10K)") from None + + +class IntervalLoader(Loader): + _re_interval = re.compile( + rb""" + (?: ([-+]?\d+) \s+ years? \s* )? # Years + (?: ([-+]?\d+) \s+ mons? \s* )? # Months + (?: ([-+]?\d+) \s+ days? \s* )? # Days + (?: ([-+])? (\d+) : (\d+) : (\d+ (?:\.\d+)?) # Time + )? + """, + re.VERBOSE, + ) + + def __init__(self, oid: int, context: Optional[AdaptContext] = None): + super().__init__(oid, context) + if self.connection: + ints = self.connection.pgconn.parameter_status(b"IntervalStyle") + if ints != b"postgres": + setattr(self, "load", self._load_notimpl) + + def load(self, data: Buffer) -> timedelta: + m = self._re_interval.match(data) + if not m: + s = bytes(data).decode("utf8", "replace") + raise DataError(f"can't parse interval {s!r}") + + ye, mo, da, sgn, ho, mi, se = m.groups() + days = 0 + seconds = 0.0 + + if ye: + days += 365 * int(ye) + if mo: + days += 30 * int(mo) + if da: + days += int(da) + + if ho: + seconds = 3600 * int(ho) + 60 * int(mi) + float(se) + if sgn == b"-": + seconds = -seconds + + try: + return timedelta(days=days, seconds=seconds) + except OverflowError as e: + s = bytes(data).decode("utf8", "replace") + raise DataError(f"can't parse interval {s!r}: {e}") from None + + def _load_notimpl(self, data: Buffer) -> timedelta: + s = bytes(data).decode("utf8", "replace") + ints = ( + self.connection + and self.connection.pgconn.parameter_status(b"IntervalStyle") + or b"unknown" + ).decode("utf8", "replace") + raise NotImplementedError( + f"can't parse 
interval with IntervalStyle {ints}: {s!r}" + ) + + +class IntervalBinaryLoader(Loader): + format = Format.BINARY + + def load(self, data: Buffer) -> timedelta: + micros, days, months = _unpack_interval(data) + if months > 0: + years, months = divmod(months, 12) + days = days + 30 * months + 365 * years + elif months < 0: + years, months = divmod(-months, 12) + days = days - 30 * months - 365 * years + + try: + return timedelta(days=days, microseconds=micros) + except OverflowError as e: + raise DataError(f"can't parse interval: {e}") from None + + +def _get_datestyle(conn: Optional["BaseConnection[Any]"]) -> bytes: + if conn: + ds = conn.pgconn.parameter_status(b"DateStyle") + if ds: + return ds + + return b"ISO, DMY" + + +def _get_timestamp_load_error( + conn: Optional["BaseConnection[Any]"], data: Buffer, ex: Optional[Exception] = None +) -> Exception: + s = bytes(data).decode("utf8", "replace") + + def is_overflow(s: str) -> bool: + if not s: + return False + + ds = _get_datestyle(conn) + if not ds.startswith(b"P"): # Postgres + return len(s.split()[0]) > 10 # date is first token + else: + return len(s.split()[-1]) > 4 # year is last token + + if s == "-infinity" or s.endswith("BC"): + return DataError("timestamp too small (before year 1): {s!r}") + elif s == "infinity" or is_overflow(s): + return DataError(f"timestamp too large (after year 10K): {s!r}") + else: + return DataError(f"can't parse timestamp {s!r}: {ex or '(unknown)'}") + + +_month_abbr = { + n: i + for i, n in enumerate(b"Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec".split(), 1) +} + +# Pad to get microseconds from a fraction of seconds +_uspad = [0, 100_000, 10_000, 1_000, 100, 10, 1] + + +def register_default_adapters(context: AdaptContext) -> None: + adapters = context.adapters + adapters.register_dumper("datetime.date", DateDumper) + adapters.register_dumper("datetime.date", DateBinaryDumper) + + # first register dumpers for 'timetz' oid, then the proper ones on time type. 
+ adapters.register_dumper("datetime.time", TimeTzDumper) + adapters.register_dumper("datetime.time", TimeTzBinaryDumper) + adapters.register_dumper("datetime.time", TimeDumper) + adapters.register_dumper("datetime.time", TimeBinaryDumper) + + # first register dumpers for 'timestamp' oid, then the proper ones + # on the datetime type. + adapters.register_dumper("datetime.datetime", DatetimeNoTzDumper) + adapters.register_dumper("datetime.datetime", DatetimeNoTzBinaryDumper) + adapters.register_dumper("datetime.datetime", DatetimeDumper) + adapters.register_dumper("datetime.datetime", DatetimeBinaryDumper) + + adapters.register_dumper("datetime.timedelta", TimedeltaDumper) + adapters.register_dumper("datetime.timedelta", TimedeltaBinaryDumper) + + adapters.register_loader("date", DateLoader) + adapters.register_loader("date", DateBinaryLoader) + adapters.register_loader("time", TimeLoader) + adapters.register_loader("time", TimeBinaryLoader) + adapters.register_loader("timetz", TimetzLoader) + adapters.register_loader("timetz", TimetzBinaryLoader) + adapters.register_loader("timestamp", TimestampLoader) + adapters.register_loader("timestamp", TimestampBinaryLoader) + adapters.register_loader("timestamptz", TimestamptzLoader) + adapters.register_loader("timestamptz", TimestamptzBinaryLoader) + adapters.register_loader("interval", IntervalLoader) + adapters.register_loader("interval", IntervalBinaryLoader) diff --git a/lib/python3.11/site-packages/psycopg/types/enum.py b/lib/python3.11/site-packages/psycopg/types/enum.py new file mode 100644 index 0000000..d3c7387 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/types/enum.py @@ -0,0 +1,177 @@ +""" +Adapters for the enum type. +""" +from enum import Enum +from typing import Any, Dict, Generic, Optional, Mapping, Sequence +from typing import Tuple, Type, TypeVar, Union, cast +from typing_extensions import TypeAlias + +from .. import postgres +from .. 
import errors as e +from ..pq import Format +from ..abc import AdaptContext +from ..adapt import Buffer, Dumper, Loader +from .._encodings import conn_encoding +from .._typeinfo import EnumInfo as EnumInfo # exported here + +E = TypeVar("E", bound=Enum) + +EnumDumpMap: TypeAlias = Dict[E, bytes] +EnumLoadMap: TypeAlias = Dict[bytes, E] +EnumMapping: TypeAlias = Union[Mapping[E, str], Sequence[Tuple[E, str]], None] + + +class _BaseEnumLoader(Loader, Generic[E]): + """ + Loader for a specific Enum class + """ + + enum: Type[E] + _load_map: EnumLoadMap[E] + + def load(self, data: Buffer) -> E: + if not isinstance(data, bytes): + data = bytes(data) + + try: + return self._load_map[data] + except KeyError: + enc = conn_encoding(self.connection) + label = data.decode(enc, "replace") + raise e.DataError( + f"bad member for enum {self.enum.__qualname__}: {label!r}" + ) + + +class _BaseEnumDumper(Dumper, Generic[E]): + """ + Dumper for a specific Enum class + """ + + enum: Type[E] + _dump_map: EnumDumpMap[E] + + def dump(self, value: E) -> Buffer: + return self._dump_map[value] + + +class EnumDumper(Dumper): + """ + Dumper for a generic Enum class + """ + + def __init__(self, cls: type, context: Optional[AdaptContext] = None): + super().__init__(cls, context) + self._encoding = conn_encoding(self.connection) + + def dump(self, value: E) -> Buffer: + return value.name.encode(self._encoding) + + +class EnumBinaryDumper(EnumDumper): + format = Format.BINARY + + +def register_enum( + info: EnumInfo, + context: Optional[AdaptContext] = None, + enum: Optional[Type[E]] = None, + *, + mapping: EnumMapping[E] = None, +) -> None: + """Register the adapters to load and dump a enum type. + + :param info: The object with the information about the enum to register. + :param context: The context where to register the adapters. If `!None`, + register it globally. + :param enum: Python enum type matching to the PostgreSQL one. 
If `!None`, + a new enum will be generated and exposed as `EnumInfo.enum`. + :param mapping: Override the mapping between `!enum` members and `!info` + labels. + """ + + if not info: + raise TypeError("no info passed. Is the requested enum available?") + + if enum is None: + enum = cast(Type[E], Enum(info.name.title(), info.labels, module=__name__)) + + info.enum = enum + adapters = context.adapters if context else postgres.adapters + info.register(context) + + load_map = _make_load_map(info, enum, mapping, context) + attribs: Dict[str, Any] = {"enum": info.enum, "_load_map": load_map} + + name = f"{info.name.title()}Loader" + loader = type(name, (_BaseEnumLoader,), attribs) + adapters.register_loader(info.oid, loader) + + name = f"{info.name.title()}BinaryLoader" + loader = type(name, (_BaseEnumLoader,), {**attribs, "format": Format.BINARY}) + adapters.register_loader(info.oid, loader) + + dump_map = _make_dump_map(info, enum, mapping, context) + attribs = {"oid": info.oid, "enum": info.enum, "_dump_map": dump_map} + + name = f"{enum.__name__}Dumper" + dumper = type(name, (_BaseEnumDumper,), attribs) + adapters.register_dumper(info.enum, dumper) + + name = f"{enum.__name__}BinaryDumper" + dumper = type(name, (_BaseEnumDumper,), {**attribs, "format": Format.BINARY}) + adapters.register_dumper(info.enum, dumper) + + +def _make_load_map( + info: EnumInfo, + enum: Type[E], + mapping: EnumMapping[E], + context: Optional[AdaptContext], +) -> EnumLoadMap[E]: + enc = conn_encoding(context.connection if context else None) + rv: EnumLoadMap[E] = {} + for label in info.labels: + try: + member = enum[label] + except KeyError: + # tolerate a missing enum, assuming it won't be used. If it is we + # will get a DataError on fetch. 
+ pass + else: + rv[label.encode(enc)] = member + + if mapping: + if isinstance(mapping, Mapping): + mapping = list(mapping.items()) + + for member, label in mapping: + rv[label.encode(enc)] = member + + return rv + + +def _make_dump_map( + info: EnumInfo, + enum: Type[E], + mapping: EnumMapping[E], + context: Optional[AdaptContext], +) -> EnumDumpMap[E]: + enc = conn_encoding(context.connection if context else None) + rv: EnumDumpMap[E] = {} + for member in enum: + rv[member] = member.name.encode(enc) + + if mapping: + if isinstance(mapping, Mapping): + mapping = list(mapping.items()) + + for member, label in mapping: + rv[member] = label.encode(enc) + + return rv + + +def register_default_adapters(context: AdaptContext) -> None: + context.adapters.register_dumper(Enum, EnumBinaryDumper) + context.adapters.register_dumper(Enum, EnumDumper) diff --git a/lib/python3.11/site-packages/psycopg/types/hstore.py b/lib/python3.11/site-packages/psycopg/types/hstore.py new file mode 100644 index 0000000..c3935d6 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/types/hstore.py @@ -0,0 +1,130 @@ +""" +Dict to hstore adaptation +""" + +# Copyright (C) 2021 The Psycopg Team + +import re +from typing import Dict, List, Optional +from typing_extensions import TypeAlias + +from .. import errors as e +from .. import postgres +from ..abc import Buffer, AdaptContext +from ..adapt import PyFormat, RecursiveDumper, RecursiveLoader +from ..postgres import TEXT_OID +from .._typeinfo import TypeInfo + +_re_escape = re.compile(r'(["\\])') +_re_unescape = re.compile(r"\\(.)") + +_re_hstore = re.compile( + r""" + # hstore key: + # a string of normal or escaped chars + "((?: [^"\\] | \\. )*)" + \s*=>\s* # hstore value + (?: + NULL # the value can be null - not caught + # or a quoted string like the key + | "((?: [^"\\] | \\. )*)" + ) + (?:\s*,\s*|$) # pairs separated by comma or end of string. 
+""", + re.VERBOSE, +) + + +Hstore: TypeAlias = Dict[str, Optional[str]] + + +class BaseHstoreDumper(RecursiveDumper): + def dump(self, obj: Hstore) -> Buffer: + if not obj: + return b"" + + tokens: List[str] = [] + + def add_token(s: str) -> None: + tokens.append('"') + tokens.append(_re_escape.sub(r"\\\1", s)) + tokens.append('"') + + for k, v in obj.items(): + if not isinstance(k, str): + raise e.DataError("hstore keys can only be strings") + add_token(k) + + tokens.append("=>") + + if v is None: + tokens.append("NULL") + elif not isinstance(v, str): + raise e.DataError("hstore keys can only be strings") + else: + add_token(v) + + tokens.append(",") + + del tokens[-1] + data = "".join(tokens) + dumper = self._tx.get_dumper(data, PyFormat.TEXT) + return dumper.dump(data) + + +class HstoreLoader(RecursiveLoader): + def load(self, data: Buffer) -> Hstore: + loader = self._tx.get_loader(TEXT_OID, self.format) + s: str = loader.load(data) + + rv: Hstore = {} + start = 0 + for m in _re_hstore.finditer(s): + if m is None or m.start() != start: + raise e.DataError(f"error parsing hstore pair at char {start}") + k = _re_unescape.sub(r"\1", m.group(1)) + v = m.group(2) + if v is not None: + v = _re_unescape.sub(r"\1", v) + + rv[k] = v + start = m.end() + + if start < len(s): + raise e.DataError(f"error parsing hstore: unparsed data after char {start}") + + return rv + + +def register_hstore(info: TypeInfo, context: Optional[AdaptContext] = None) -> None: + """Register the adapters to load and dump hstore. + + :param info: The object with the information about the hstore type. + :param context: The context where to register the adapters. If `!None`, + register it globally. + + .. note:: + + Registering the adapters doesn't affect objects already created, even + if they are children of the registered context. For instance, + registering the adapter globally doesn't affect already existing + connections. 
+ """ + # A friendly error warning instead of an AttributeError in case fetch() + # failed and it wasn't noticed. + if not info: + raise TypeError("no info passed. Is the 'hstore' extension loaded?") + + # Register arrays and type info + info.register(context) + + adapters = context.adapters if context else postgres.adapters + + # Generate and register a customized text dumper + class HstoreDumper(BaseHstoreDumper): + oid = info.oid + + adapters.register_dumper(dict, HstoreDumper) + + # register the text loader on the oid + adapters.register_loader(info.oid, HstoreLoader) diff --git a/lib/python3.11/site-packages/psycopg/types/json.py b/lib/python3.11/site-packages/psycopg/types/json.py new file mode 100644 index 0000000..b3323cf --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/types/json.py @@ -0,0 +1,228 @@ +""" +Adapers for JSON types. +""" + +# Copyright (C) 2020 The Psycopg Team + +import json +from typing import Any, Callable, Dict, Optional, Tuple, Type, Union + +from .. import abc +from .. import errors as e +from .. import postgres +from ..pq import Format +from ..adapt import Buffer, Dumper, Loader, PyFormat, AdaptersMap +from ..errors import DataError + +JsonDumpsFunction = Callable[[Any], str] +JsonLoadsFunction = Callable[[Union[str, bytes]], Any] + + +def set_json_dumps( + dumps: JsonDumpsFunction, context: Optional[abc.AdaptContext] = None +) -> None: + """ + Set the JSON serialisation function to store JSON objects in the database. + + :param dumps: The dump function to use. + :type dumps: `!Callable[[Any], str]` + :param context: Where to use the `!dumps` function. If not specified, use it + globally. + :type context: `~psycopg.Connection` or `~psycopg.Cursor` + + By default dumping JSON uses the builtin `json.dumps`. You can override + it to use a different JSON library or to use customised arguments. + + If the `Json` wrapper specified a `!dumps` function, use it in precedence + of the one set by this function. 
+ """ + if context is None: + # If changing load function globally, just change the default on the + # global class + _JsonDumper._dumps = dumps + else: + adapters = context.adapters + + # If the scope is smaller than global, create subclassess and register + # them in the appropriate scope. + grid = [ + (Json, PyFormat.BINARY), + (Json, PyFormat.TEXT), + (Jsonb, PyFormat.BINARY), + (Jsonb, PyFormat.TEXT), + ] + dumper: Type[_JsonDumper] + for wrapper, format in grid: + base = _get_current_dumper(adapters, wrapper, format) + name = base.__name__ + if not base.__name__.startswith("Custom"): + name = f"Custom{name}" + dumper = type(name, (base,), {"_dumps": dumps}) + adapters.register_dumper(wrapper, dumper) + + +def set_json_loads( + loads: JsonLoadsFunction, context: Optional[abc.AdaptContext] = None +) -> None: + """ + Set the JSON parsing function to fetch JSON objects from the database. + + :param loads: The load function to use. + :type loads: `!Callable[[bytes], Any]` + :param context: Where to use the `!loads` function. If not specified, use + it globally. + :type context: `~psycopg.Connection` or `~psycopg.Cursor` + + By default loading JSON uses the builtin `json.loads`. You can override + it to use a different JSON library or to use customised arguments. + """ + if context is None: + # If changing load function globally, just change the default on the + # global class + _JsonLoader._loads = loads + else: + # If the scope is smaller than global, create subclassess and register + # them in the appropriate scope. 
+ grid = [ + ("json", JsonLoader), + ("json", JsonBinaryLoader), + ("jsonb", JsonbLoader), + ("jsonb", JsonbBinaryLoader), + ] + loader: Type[_JsonLoader] + for tname, base in grid: + loader = type(f"Custom{base.__name__}", (base,), {"_loads": loads}) + context.adapters.register_loader(tname, loader) + + +class _JsonWrapper: + __slots__ = ("obj", "dumps") + + def __init__(self, obj: Any, dumps: Optional[JsonDumpsFunction] = None): + self.obj = obj + self.dumps = dumps + + def __repr__(self) -> str: + sobj = repr(self.obj) + if len(sobj) > 40: + sobj = f"{sobj[:35]} ... ({len(sobj)} chars)" + return f"{self.__class__.__name__}({sobj})" + + +class Json(_JsonWrapper): + __slots__ = () + + +class Jsonb(_JsonWrapper): + __slots__ = () + + +class _JsonDumper(Dumper): + # The globally used JSON dumps() function. It can be changed globally (by + # set_json_dumps) or by a subclass. + _dumps: JsonDumpsFunction = json.dumps + + def __init__(self, cls: type, context: Optional[abc.AdaptContext] = None): + super().__init__(cls, context) + self.dumps = self.__class__._dumps + + def dump(self, obj: Any) -> bytes: + if isinstance(obj, _JsonWrapper): + dumps = obj.dumps or self.dumps + obj = obj.obj + else: + dumps = self.dumps + return dumps(obj).encode() + + +class JsonDumper(_JsonDumper): + oid = postgres.types["json"].oid + + +class JsonBinaryDumper(_JsonDumper): + format = Format.BINARY + oid = postgres.types["json"].oid + + +class JsonbDumper(_JsonDumper): + oid = postgres.types["jsonb"].oid + + +class JsonbBinaryDumper(_JsonDumper): + format = Format.BINARY + oid = postgres.types["jsonb"].oid + + def dump(self, obj: Any) -> bytes: + return b"\x01" + super().dump(obj) + + +class _JsonLoader(Loader): + # The globally used JSON loads() function. It can be changed globally (by + # set_json_loads) or by a subclass. 
+ _loads: JsonLoadsFunction = json.loads + + def __init__(self, oid: int, context: Optional[abc.AdaptContext] = None): + super().__init__(oid, context) + self.loads = self.__class__._loads + + def load(self, data: Buffer) -> Any: + # json.loads() cannot work on memoryview. + if not isinstance(data, bytes): + data = bytes(data) + return self.loads(data) + + +class JsonLoader(_JsonLoader): + pass + + +class JsonbLoader(_JsonLoader): + pass + + +class JsonBinaryLoader(_JsonLoader): + format = Format.BINARY + + +class JsonbBinaryLoader(_JsonLoader): + format = Format.BINARY + + def load(self, data: Buffer) -> Any: + if data and data[0] != 1: + raise DataError("unknown jsonb binary format: {data[0]}") + data = data[1:] + if not isinstance(data, bytes): + data = bytes(data) + return self.loads(data) + + +def _get_current_dumper( + adapters: AdaptersMap, cls: type, format: PyFormat +) -> Type[abc.Dumper]: + try: + return adapters.get_dumper(cls, format) + except e.ProgrammingError: + return _default_dumpers[cls, format] + + +_default_dumpers: Dict[Tuple[Type[_JsonWrapper], PyFormat], Type[Dumper]] = { + (Json, PyFormat.BINARY): JsonBinaryDumper, + (Json, PyFormat.TEXT): JsonDumper, + (Jsonb, PyFormat.BINARY): JsonbBinaryDumper, + (Jsonb, PyFormat.TEXT): JsonDumper, +} + + +def register_default_adapters(context: abc.AdaptContext) -> None: + adapters = context.adapters + + # Currently json binary format is nothing different than text, maybe with + # an extra memcopy we can avoid. 
+ adapters.register_dumper(Json, JsonBinaryDumper) + adapters.register_dumper(Json, JsonDumper) + adapters.register_dumper(Jsonb, JsonbBinaryDumper) + adapters.register_dumper(Jsonb, JsonbDumper) + adapters.register_loader("json", JsonLoader) + adapters.register_loader("jsonb", JsonbLoader) + adapters.register_loader("json", JsonBinaryLoader) + adapters.register_loader("jsonb", JsonbBinaryLoader) diff --git a/lib/python3.11/site-packages/psycopg/types/multirange.py b/lib/python3.11/site-packages/psycopg/types/multirange.py new file mode 100644 index 0000000..c893148 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/types/multirange.py @@ -0,0 +1,511 @@ +""" +Support for multirange types adaptation. +""" + +# Copyright (C) 2021 The Psycopg Team + +from decimal import Decimal +from typing import Any, Generic, List, Iterable +from typing import MutableSequence, Optional, Type, Union, overload +from datetime import date, datetime + +from .. import errors as e +from .. import postgres +from ..pq import Format +from ..abc import AdaptContext, Buffer, Dumper, DumperKey +from ..adapt import RecursiveDumper, RecursiveLoader, PyFormat +from .._struct import pack_len, unpack_len +from ..postgres import INVALID_OID, TEXT_OID +from .._typeinfo import MultirangeInfo as MultirangeInfo # exported here + +from .range import Range, T, load_range_text, load_range_binary +from .range import dump_range_text, dump_range_binary, fail_dump + + +class Multirange(MutableSequence[Range[T]]): + """Python representation for a PostgreSQL multirange type. + + :param items: Sequence of ranges to initialise the object. 
+ """ + + def __init__(self, items: Iterable[Range[T]] = ()): + self._ranges: List[Range[T]] = list(map(self._check_type, items)) + + def _check_type(self, item: Any) -> Range[Any]: + if not isinstance(item, Range): + raise TypeError( + f"Multirange is a sequence of Range, got {type(item).__name__}" + ) + return item + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self._ranges!r})" + + def __str__(self) -> str: + return f"{{{', '.join(map(str, self._ranges))}}}" + + @overload + def __getitem__(self, index: int) -> Range[T]: + ... + + @overload + def __getitem__(self, index: slice) -> "Multirange[T]": + ... + + def __getitem__(self, index: Union[int, slice]) -> "Union[Range[T],Multirange[T]]": + if isinstance(index, int): + return self._ranges[index] + else: + return Multirange(self._ranges[index]) + + def __len__(self) -> int: + return len(self._ranges) + + @overload + def __setitem__(self, index: int, value: Range[T]) -> None: + ... + + @overload + def __setitem__(self, index: slice, value: Iterable[Range[T]]) -> None: + ... 
+ + def __setitem__( + self, + index: Union[int, slice], + value: Union[Range[T], Iterable[Range[T]]], + ) -> None: + if isinstance(index, int): + self._check_type(value) + self._ranges[index] = self._check_type(value) + elif not isinstance(value, Iterable): + raise TypeError("can only assign an iterable") + else: + value = map(self._check_type, value) + self._ranges[index] = value + + def __delitem__(self, index: Union[int, slice]) -> None: + del self._ranges[index] + + def insert(self, index: int, value: Range[T]) -> None: + self._ranges.insert(index, self._check_type(value)) + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, Multirange): + return False + return self._ranges == other._ranges + + # Order is arbitrary but consistent + + def __lt__(self, other: Any) -> bool: + if not isinstance(other, Multirange): + return NotImplemented + return self._ranges < other._ranges + + def __le__(self, other: Any) -> bool: + return self == other or self < other # type: ignore + + def __gt__(self, other: Any) -> bool: + if not isinstance(other, Multirange): + return NotImplemented + return self._ranges > other._ranges + + def __ge__(self, other: Any) -> bool: + return self == other or self > other # type: ignore + + +# Subclasses to specify a specific subtype. 
Usually not needed + + +class Int4Multirange(Multirange[int]): + pass + + +class Int8Multirange(Multirange[int]): + pass + + +class NumericMultirange(Multirange[Decimal]): + pass + + +class DateMultirange(Multirange[date]): + pass + + +class TimestampMultirange(Multirange[datetime]): + pass + + +class TimestamptzMultirange(Multirange[datetime]): + pass + + +class BaseMultirangeDumper(RecursiveDumper): + def __init__(self, cls: type, context: Optional[AdaptContext] = None): + super().__init__(cls, context) + self.sub_dumper: Optional[Dumper] = None + self._adapt_format = PyFormat.from_pq(self.format) + + def get_key(self, obj: Multirange[Any], format: PyFormat) -> DumperKey: + # If we are a subclass whose oid is specified we don't need upgrade + if self.cls is not Multirange: + return self.cls + + item = self._get_item(obj) + if item is not None: + sd = self._tx.get_dumper(item, self._adapt_format) + return (self.cls, sd.get_key(item, format)) + else: + return (self.cls,) + + def upgrade(self, obj: Multirange[Any], format: PyFormat) -> "BaseMultirangeDumper": + # If we are a subclass whose oid is specified we don't need upgrade + if self.cls is not Multirange: + return self + + item = self._get_item(obj) + if item is None: + return MultirangeDumper(self.cls) + + dumper: BaseMultirangeDumper + if type(item) is int: + # postgres won't cast int4range -> int8range so we must use + # text format and unknown oid here + sd = self._tx.get_dumper(item, PyFormat.TEXT) + dumper = MultirangeDumper(self.cls, self._tx) + dumper.sub_dumper = sd + dumper.oid = INVALID_OID + return dumper + + sd = self._tx.get_dumper(item, format) + dumper = type(self)(self.cls, self._tx) + dumper.sub_dumper = sd + if sd.oid == INVALID_OID and isinstance(item, str): + # Work around the normal mapping where text is dumped as unknown + dumper.oid = self._get_multirange_oid(TEXT_OID) + else: + dumper.oid = self._get_multirange_oid(sd.oid) + + return dumper + + def _get_item(self, obj: Multirange[Any]) 
-> Any: + """ + Return a member representative of the multirange + """ + for r in obj: + if r.lower is not None: + return r.lower + if r.upper is not None: + return r.upper + return None + + def _get_multirange_oid(self, sub_oid: int) -> int: + """ + Return the oid of the range from the oid of its elements. + """ + info = self._tx.adapters.types.get_by_subtype(MultirangeInfo, sub_oid) + return info.oid if info else INVALID_OID + + +class MultirangeDumper(BaseMultirangeDumper): + """ + Dumper for multirange types. + + The dumper can upgrade to one specific for a different range type. + """ + + def dump(self, obj: Multirange[Any]) -> Buffer: + if not obj: + return b"{}" + + item = self._get_item(obj) + if item is not None: + dump = self._tx.get_dumper(item, self._adapt_format).dump + else: + dump = fail_dump + + out: List[Buffer] = [b"{"] + for r in obj: + out.append(dump_range_text(r, dump)) + out.append(b",") + out[-1] = b"}" + return b"".join(out) + + +class MultirangeBinaryDumper(BaseMultirangeDumper): + format = Format.BINARY + + def dump(self, obj: Multirange[Any]) -> Buffer: + item = self._get_item(obj) + if item is not None: + dump = self._tx.get_dumper(item, self._adapt_format).dump + else: + dump = fail_dump + + out: List[Buffer] = [pack_len(len(obj))] + for r in obj: + data = dump_range_binary(r, dump) + out.append(pack_len(len(data))) + out.append(data) + return b"".join(out) + + +class BaseMultirangeLoader(RecursiveLoader, Generic[T]): + subtype_oid: int + + def __init__(self, oid: int, context: Optional[AdaptContext] = None): + super().__init__(oid, context) + self._load = self._tx.get_loader(self.subtype_oid, format=self.format).load + + +class MultirangeLoader(BaseMultirangeLoader[T]): + def load(self, data: Buffer) -> Multirange[T]: + if not data or data[0] != _START_INT: + raise e.DataError( + "malformed multirange starting with" + f" {bytes(data[:1]).decode('utf8', 'replace')}" + ) + + out = Multirange[T]() + if data == b"{}": + return out + + pos 
= 1 + data = data[pos:] + try: + while True: + r, pos = load_range_text(data, self._load) + out.append(r) + + sep = data[pos] # can raise IndexError + if sep == _SEP_INT: + data = data[pos + 1 :] + continue + elif sep == _END_INT: + if len(data) == pos + 1: + return out + else: + raise e.DataError( + "malformed multirange: data after closing brace" + ) + else: + raise e.DataError( + f"malformed multirange: found unexpected {chr(sep)}" + ) + + except IndexError: + raise e.DataError("malformed multirange: separator missing") + + return out + + +_SEP_INT = ord(",") +_START_INT = ord("{") +_END_INT = ord("}") + + +class MultirangeBinaryLoader(BaseMultirangeLoader[T]): + format = Format.BINARY + + def load(self, data: Buffer) -> Multirange[T]: + nelems = unpack_len(data, 0)[0] + pos = 4 + out = Multirange[T]() + for i in range(nelems): + length = unpack_len(data, pos)[0] + pos += 4 + out.append(load_range_binary(data[pos : pos + length], self._load)) + pos += length + + if pos != len(data): + raise e.DataError("unexpected trailing data in multirange") + + return out + + +def register_multirange( + info: MultirangeInfo, context: Optional[AdaptContext] = None +) -> None: + """Register the adapters to load and dump a multirange type. + + :param info: The object with the information about the range to register. + :param context: The context where to register the adapters. If `!None`, + register it globally. + + Register loaders so that loading data of this type will result in a `Range` + with bounds parsed as the right subtype. + + .. note:: + + Registering the adapters doesn't affect objects already created, even + if they are children of the registered context. For instance, + registering the adapter globally doesn't affect already existing + connections. + """ + # A friendly error warning instead of an AttributeError in case fetch() + # failed and it wasn't noticed. + if not info: + raise TypeError("no info passed. 
Is the requested multirange available?") + + # Register arrays and type info + info.register(context) + + adapters = context.adapters if context else postgres.adapters + + # generate and register a customized text loader + loader: Type[MultirangeLoader[Any]] = type( + f"{info.name.title()}Loader", + (MultirangeLoader,), + {"subtype_oid": info.subtype_oid}, + ) + adapters.register_loader(info.oid, loader) + + # generate and register a customized binary loader + bloader: Type[MultirangeBinaryLoader[Any]] = type( + f"{info.name.title()}BinaryLoader", + (MultirangeBinaryLoader,), + {"subtype_oid": info.subtype_oid}, + ) + adapters.register_loader(info.oid, bloader) + + +# Text dumpers for builtin multirange types wrappers +# These are registered on specific subtypes so that the upgrade mechanism +# doesn't kick in. + + +class Int4MultirangeDumper(MultirangeDumper): + oid = postgres.types["int4multirange"].oid + + +class Int8MultirangeDumper(MultirangeDumper): + oid = postgres.types["int8multirange"].oid + + +class NumericMultirangeDumper(MultirangeDumper): + oid = postgres.types["nummultirange"].oid + + +class DateMultirangeDumper(MultirangeDumper): + oid = postgres.types["datemultirange"].oid + + +class TimestampMultirangeDumper(MultirangeDumper): + oid = postgres.types["tsmultirange"].oid + + +class TimestamptzMultirangeDumper(MultirangeDumper): + oid = postgres.types["tstzmultirange"].oid + + +# Binary dumpers for builtin multirange types wrappers +# These are registered on specific subtypes so that the upgrade mechanism +# doesn't kick in. 
+ + +class Int4MultirangeBinaryDumper(MultirangeBinaryDumper): + oid = postgres.types["int4multirange"].oid + + +class Int8MultirangeBinaryDumper(MultirangeBinaryDumper): + oid = postgres.types["int8multirange"].oid + + +class NumericMultirangeBinaryDumper(MultirangeBinaryDumper): + oid = postgres.types["nummultirange"].oid + + +class DateMultirangeBinaryDumper(MultirangeBinaryDumper): + oid = postgres.types["datemultirange"].oid + + +class TimestampMultirangeBinaryDumper(MultirangeBinaryDumper): + oid = postgres.types["tsmultirange"].oid + + +class TimestamptzMultirangeBinaryDumper(MultirangeBinaryDumper): + oid = postgres.types["tstzmultirange"].oid + + +# Text loaders for builtin multirange types + + +class Int4MultirangeLoader(MultirangeLoader[int]): + subtype_oid = postgres.types["int4"].oid + + +class Int8MultirangeLoader(MultirangeLoader[int]): + subtype_oid = postgres.types["int8"].oid + + +class NumericMultirangeLoader(MultirangeLoader[Decimal]): + subtype_oid = postgres.types["numeric"].oid + + +class DateMultirangeLoader(MultirangeLoader[date]): + subtype_oid = postgres.types["date"].oid + + +class TimestampMultirangeLoader(MultirangeLoader[datetime]): + subtype_oid = postgres.types["timestamp"].oid + + +class TimestampTZMultirangeLoader(MultirangeLoader[datetime]): + subtype_oid = postgres.types["timestamptz"].oid + + +# Binary loaders for builtin multirange types + + +class Int4MultirangeBinaryLoader(MultirangeBinaryLoader[int]): + subtype_oid = postgres.types["int4"].oid + + +class Int8MultirangeBinaryLoader(MultirangeBinaryLoader[int]): + subtype_oid = postgres.types["int8"].oid + + +class NumericMultirangeBinaryLoader(MultirangeBinaryLoader[Decimal]): + subtype_oid = postgres.types["numeric"].oid + + +class DateMultirangeBinaryLoader(MultirangeBinaryLoader[date]): + subtype_oid = postgres.types["date"].oid + + +class TimestampMultirangeBinaryLoader(MultirangeBinaryLoader[datetime]): + subtype_oid = postgres.types["timestamp"].oid + + +class 
TimestampTZMultirangeBinaryLoader(MultirangeBinaryLoader[datetime]): + subtype_oid = postgres.types["timestamptz"].oid + + +def register_default_adapters(context: AdaptContext) -> None: + adapters = context.adapters + adapters.register_dumper(Multirange, MultirangeBinaryDumper) + adapters.register_dumper(Multirange, MultirangeDumper) + adapters.register_dumper(Int4Multirange, Int4MultirangeDumper) + adapters.register_dumper(Int8Multirange, Int8MultirangeDumper) + adapters.register_dumper(NumericMultirange, NumericMultirangeDumper) + adapters.register_dumper(DateMultirange, DateMultirangeDumper) + adapters.register_dumper(TimestampMultirange, TimestampMultirangeDumper) + adapters.register_dumper(TimestamptzMultirange, TimestamptzMultirangeDumper) + adapters.register_dumper(Int4Multirange, Int4MultirangeBinaryDumper) + adapters.register_dumper(Int8Multirange, Int8MultirangeBinaryDumper) + adapters.register_dumper(NumericMultirange, NumericMultirangeBinaryDumper) + adapters.register_dumper(DateMultirange, DateMultirangeBinaryDumper) + adapters.register_dumper(TimestampMultirange, TimestampMultirangeBinaryDumper) + adapters.register_dumper(TimestamptzMultirange, TimestamptzMultirangeBinaryDumper) + adapters.register_loader("int4multirange", Int4MultirangeLoader) + adapters.register_loader("int8multirange", Int8MultirangeLoader) + adapters.register_loader("nummultirange", NumericMultirangeLoader) + adapters.register_loader("datemultirange", DateMultirangeLoader) + adapters.register_loader("tsmultirange", TimestampMultirangeLoader) + adapters.register_loader("tstzmultirange", TimestampTZMultirangeLoader) + adapters.register_loader("int4multirange", Int4MultirangeBinaryLoader) + adapters.register_loader("int8multirange", Int8MultirangeBinaryLoader) + adapters.register_loader("nummultirange", NumericMultirangeBinaryLoader) + adapters.register_loader("datemultirange", DateMultirangeBinaryLoader) + adapters.register_loader("tsmultirange", TimestampMultirangeBinaryLoader) + 
adapters.register_loader("tstzmultirange", TimestampTZMultirangeBinaryLoader) diff --git a/lib/python3.11/site-packages/psycopg/types/net.py b/lib/python3.11/site-packages/psycopg/types/net.py new file mode 100644 index 0000000..36b4053 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/types/net.py @@ -0,0 +1,201 @@ +""" +Adapters for network types. +""" + +# Copyright (C) 2020 The Psycopg Team + +from typing import Callable, Optional, Type, Union, TYPE_CHECKING +from typing_extensions import TypeAlias + +from .. import postgres +from ..pq import Format +from ..abc import AdaptContext +from ..adapt import Buffer, Dumper, Loader + +if TYPE_CHECKING: + import ipaddress + +Address: TypeAlias = Union["ipaddress.IPv4Address", "ipaddress.IPv6Address"] +Interface: TypeAlias = Union["ipaddress.IPv4Interface", "ipaddress.IPv6Interface"] +Network: TypeAlias = Union["ipaddress.IPv4Network", "ipaddress.IPv6Network"] + +# These objects will be imported lazily +ip_address: Callable[[str], Address] = None # type: ignore[assignment] +ip_interface: Callable[[str], Interface] = None # type: ignore[assignment] +ip_network: Callable[[str], Network] = None # type: ignore[assignment] +IPv4Address: "Type[ipaddress.IPv4Address]" = None # type: ignore[assignment] +IPv6Address: "Type[ipaddress.IPv6Address]" = None # type: ignore[assignment] +IPv4Interface: "Type[ipaddress.IPv4Interface]" = None # type: ignore[assignment] +IPv6Interface: "Type[ipaddress.IPv6Interface]" = None # type: ignore[assignment] +IPv4Network: "Type[ipaddress.IPv4Network]" = None # type: ignore[assignment] +IPv6Network: "Type[ipaddress.IPv6Network]" = None # type: ignore[assignment] + +PGSQL_AF_INET = 2 +PGSQL_AF_INET6 = 3 +IPV4_PREFIXLEN = 32 +IPV6_PREFIXLEN = 128 + + +class _LazyIpaddress: + def _ensure_module(self) -> None: + global ip_address, ip_interface, ip_network + global IPv4Address, IPv6Address, IPv4Interface, IPv6Interface + global IPv4Network, IPv6Network + + if ip_address is None: + from ipaddress 
import ip_address, ip_interface, ip_network + from ipaddress import IPv4Address, IPv6Address + from ipaddress import IPv4Interface, IPv6Interface + from ipaddress import IPv4Network, IPv6Network + + +class InterfaceDumper(Dumper): + oid = postgres.types["inet"].oid + + def dump(self, obj: Interface) -> bytes: + return str(obj).encode() + + +class NetworkDumper(Dumper): + oid = postgres.types["cidr"].oid + + def dump(self, obj: Network) -> bytes: + return str(obj).encode() + + +class _AIBinaryDumper(Dumper): + format = Format.BINARY + oid = postgres.types["inet"].oid + + +class AddressBinaryDumper(_AIBinaryDumper): + def dump(self, obj: Address) -> bytes: + packed = obj.packed + family = PGSQL_AF_INET if obj.version == 4 else PGSQL_AF_INET6 + head = bytes((family, obj.max_prefixlen, 0, len(packed))) + return head + packed + + +class InterfaceBinaryDumper(_AIBinaryDumper): + def dump(self, obj: Interface) -> bytes: + packed = obj.packed + family = PGSQL_AF_INET if obj.version == 4 else PGSQL_AF_INET6 + head = bytes((family, obj.network.prefixlen, 0, len(packed))) + return head + packed + + +class InetBinaryDumper(_AIBinaryDumper, _LazyIpaddress): + """Either an address or an interface to inet + + Used when looking up by oid. 
+ """ + + def __init__(self, cls: type, context: Optional[AdaptContext] = None): + super().__init__(cls, context) + self._ensure_module() + + def dump(self, obj: Union[Address, Interface]) -> bytes: + packed = obj.packed + family = PGSQL_AF_INET if obj.version == 4 else PGSQL_AF_INET6 + if isinstance(obj, (IPv4Interface, IPv6Interface)): + prefixlen = obj.network.prefixlen + else: + prefixlen = obj.max_prefixlen + + head = bytes((family, prefixlen, 0, len(packed))) + return head + packed + + +class NetworkBinaryDumper(Dumper): + format = Format.BINARY + oid = postgres.types["cidr"].oid + + def dump(self, obj: Network) -> bytes: + packed = obj.network_address.packed + family = PGSQL_AF_INET if obj.version == 4 else PGSQL_AF_INET6 + head = bytes((family, obj.prefixlen, 1, len(packed))) + return head + packed + + +class _LazyIpaddressLoader(Loader, _LazyIpaddress): + def __init__(self, oid: int, context: Optional[AdaptContext] = None): + super().__init__(oid, context) + self._ensure_module() + + +class InetLoader(_LazyIpaddressLoader): + def load(self, data: Buffer) -> Union[Address, Interface]: + if isinstance(data, memoryview): + data = bytes(data) + + if b"/" in data: + return ip_interface(data.decode()) + else: + return ip_address(data.decode()) + + +class InetBinaryLoader(_LazyIpaddressLoader): + format = Format.BINARY + + def load(self, data: Buffer) -> Union[Address, Interface]: + if isinstance(data, memoryview): + data = bytes(data) + + prefix = data[1] + packed = data[4:] + if data[0] == PGSQL_AF_INET: + if prefix == IPV4_PREFIXLEN: + return IPv4Address(packed) + else: + return IPv4Interface((packed, prefix)) + else: + if prefix == IPV6_PREFIXLEN: + return IPv6Address(packed) + else: + return IPv6Interface((packed, prefix)) + + +class CidrLoader(_LazyIpaddressLoader): + def load(self, data: Buffer) -> Network: + if isinstance(data, memoryview): + data = bytes(data) + + return ip_network(data.decode()) + + +class CidrBinaryLoader(_LazyIpaddressLoader): + 
format = Format.BINARY + + def load(self, data: Buffer) -> Network: + if isinstance(data, memoryview): + data = bytes(data) + + prefix = data[1] + packed = data[4:] + if data[0] == PGSQL_AF_INET: + return IPv4Network((packed, prefix)) + else: + return IPv6Network((packed, prefix)) + + return ip_network(data.decode()) + + +def register_default_adapters(context: AdaptContext) -> None: + adapters = context.adapters + adapters.register_dumper("ipaddress.IPv4Address", InterfaceDumper) + adapters.register_dumper("ipaddress.IPv6Address", InterfaceDumper) + adapters.register_dumper("ipaddress.IPv4Interface", InterfaceDumper) + adapters.register_dumper("ipaddress.IPv6Interface", InterfaceDumper) + adapters.register_dumper("ipaddress.IPv4Network", NetworkDumper) + adapters.register_dumper("ipaddress.IPv6Network", NetworkDumper) + adapters.register_dumper("ipaddress.IPv4Address", AddressBinaryDumper) + adapters.register_dumper("ipaddress.IPv6Address", AddressBinaryDumper) + adapters.register_dumper("ipaddress.IPv4Interface", InterfaceBinaryDumper) + adapters.register_dumper("ipaddress.IPv6Interface", InterfaceBinaryDumper) + adapters.register_dumper("ipaddress.IPv4Network", NetworkBinaryDumper) + adapters.register_dumper("ipaddress.IPv6Network", NetworkBinaryDumper) + adapters.register_dumper(None, InetBinaryDumper) + adapters.register_loader("inet", InetLoader) + adapters.register_loader("inet", InetBinaryLoader) + adapters.register_loader("cidr", CidrLoader) + adapters.register_loader("cidr", CidrBinaryLoader) diff --git a/lib/python3.11/site-packages/psycopg/types/none.py b/lib/python3.11/site-packages/psycopg/types/none.py new file mode 100644 index 0000000..2ab857c --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/types/none.py @@ -0,0 +1,25 @@ +""" +Adapters for None. 
+""" + +# Copyright (C) 2020 The Psycopg Team + +from ..abc import AdaptContext, NoneType +from ..adapt import Dumper + + +class NoneDumper(Dumper): + """ + Not a complete dumper as it doesn't implement dump(), but it implements + quote(), so it can be used in sql composition. + """ + + def dump(self, obj: None) -> bytes: + raise NotImplementedError("NULL is passed to Postgres in other ways") + + def quote(self, obj: None) -> bytes: + return b"NULL" + + +def register_default_adapters(context: AdaptContext) -> None: + context.adapters.register_dumper(NoneType, NoneDumper) diff --git a/lib/python3.11/site-packages/psycopg/types/numeric.py b/lib/python3.11/site-packages/psycopg/types/numeric.py new file mode 100644 index 0000000..0b9fb11 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/types/numeric.py @@ -0,0 +1,495 @@ +""" +Adapers for numeric types. +""" + +# Copyright (C) 2020 The Psycopg Team + +import struct +from math import log +from typing import Any, Callable, DefaultDict, Dict, Tuple, Union, cast +from decimal import Decimal, DefaultContext, Context + +from .. import postgres +from .. 
import errors as e +from ..pq import Format +from ..abc import AdaptContext +from ..adapt import Buffer, Dumper, Loader, PyFormat +from .._struct import pack_int2, pack_uint2, unpack_int2 +from .._struct import pack_int4, pack_uint4, unpack_int4, unpack_uint4 +from .._struct import pack_int8, unpack_int8 +from .._struct import pack_float4, pack_float8, unpack_float4, unpack_float8 + +# Exposed here +from .._wrappers import ( + Int2 as Int2, + Int4 as Int4, + Int8 as Int8, + IntNumeric as IntNumeric, + Oid as Oid, + Float4 as Float4, + Float8 as Float8, +) + + +class _IntDumper(Dumper): + def dump(self, obj: Any) -> Buffer: + t = type(obj) + if t is not int: + # Convert to int in order to dump IntEnum correctly + if issubclass(t, int): + obj = int(obj) + else: + raise e.DataError(f"integer expected, got {type(obj).__name__!r}") + + return str(obj).encode() + + def quote(self, obj: Any) -> Buffer: + value = self.dump(obj) + return value if obj >= 0 else b" " + value + + +class _SpecialValuesDumper(Dumper): + _special: Dict[bytes, bytes] = {} + + def dump(self, obj: Any) -> bytes: + return str(obj).encode() + + def quote(self, obj: Any) -> bytes: + value = self.dump(obj) + + if value in self._special: + return self._special[value] + + return value if obj >= 0 else b" " + value + + +class FloatDumper(_SpecialValuesDumper): + oid = postgres.types["float8"].oid + + _special = { + b"inf": b"'Infinity'::float8", + b"-inf": b"'-Infinity'::float8", + b"nan": b"'NaN'::float8", + } + + +class Float4Dumper(FloatDumper): + oid = postgres.types["float4"].oid + + +class FloatBinaryDumper(Dumper): + format = Format.BINARY + oid = postgres.types["float8"].oid + + def dump(self, obj: float) -> bytes: + return pack_float8(obj) + + +class Float4BinaryDumper(FloatBinaryDumper): + oid = postgres.types["float4"].oid + + def dump(self, obj: float) -> bytes: + return pack_float4(obj) + + +class DecimalDumper(_SpecialValuesDumper): + oid = postgres.types["numeric"].oid + + def dump(self, 
obj: Decimal) -> bytes: + if obj.is_nan(): + # cover NaN and sNaN + return b"NaN" + else: + return str(obj).encode() + + _special = { + b"Infinity": b"'Infinity'::numeric", + b"-Infinity": b"'-Infinity'::numeric", + b"NaN": b"'NaN'::numeric", + } + + +class Int2Dumper(_IntDumper): + oid = postgres.types["int2"].oid + + +class Int4Dumper(_IntDumper): + oid = postgres.types["int4"].oid + + +class Int8Dumper(_IntDumper): + oid = postgres.types["int8"].oid + + +class IntNumericDumper(_IntDumper): + oid = postgres.types["numeric"].oid + + +class OidDumper(_IntDumper): + oid = postgres.types["oid"].oid + + +class IntDumper(Dumper): + def dump(self, obj: Any) -> bytes: + raise TypeError( + f"{type(self).__name__} is a dispatcher to other dumpers:" + " dump() is not supposed to be called" + ) + + def get_key(self, obj: int, format: PyFormat) -> type: + return self.upgrade(obj, format).cls + + _int2_dumper = Int2Dumper(Int2) + _int4_dumper = Int4Dumper(Int4) + _int8_dumper = Int8Dumper(Int8) + _int_numeric_dumper = IntNumericDumper(IntNumeric) + + def upgrade(self, obj: int, format: PyFormat) -> Dumper: + if -(2**31) <= obj < 2**31: + if -(2**15) <= obj < 2**15: + return self._int2_dumper + else: + return self._int4_dumper + else: + if -(2**63) <= obj < 2**63: + return self._int8_dumper + else: + return self._int_numeric_dumper + + +class Int2BinaryDumper(Int2Dumper): + format = Format.BINARY + + def dump(self, obj: int) -> bytes: + return pack_int2(obj) + + +class Int4BinaryDumper(Int4Dumper): + format = Format.BINARY + + def dump(self, obj: int) -> bytes: + return pack_int4(obj) + + +class Int8BinaryDumper(Int8Dumper): + format = Format.BINARY + + def dump(self, obj: int) -> bytes: + return pack_int8(obj) + + +# Ratio between number of bits required to store a number and number of pg +# decimal digits required. 
+BIT_PER_PGDIGIT = log(2) / log(10_000) + + +class IntNumericBinaryDumper(IntNumericDumper): + format = Format.BINARY + + def dump(self, obj: int) -> Buffer: + return dump_int_to_numeric_binary(obj) + + +class OidBinaryDumper(OidDumper): + format = Format.BINARY + + def dump(self, obj: int) -> bytes: + return pack_uint4(obj) + + +class IntBinaryDumper(IntDumper): + format = Format.BINARY + + _int2_dumper = Int2BinaryDumper(Int2) + _int4_dumper = Int4BinaryDumper(Int4) + _int8_dumper = Int8BinaryDumper(Int8) + _int_numeric_dumper = IntNumericBinaryDumper(IntNumeric) + + +class IntLoader(Loader): + def load(self, data: Buffer) -> int: + # it supports bytes directly + return int(data) + + +class Int2BinaryLoader(Loader): + format = Format.BINARY + + def load(self, data: Buffer) -> int: + return unpack_int2(data)[0] + + +class Int4BinaryLoader(Loader): + format = Format.BINARY + + def load(self, data: Buffer) -> int: + return unpack_int4(data)[0] + + +class Int8BinaryLoader(Loader): + format = Format.BINARY + + def load(self, data: Buffer) -> int: + return unpack_int8(data)[0] + + +class OidBinaryLoader(Loader): + format = Format.BINARY + + def load(self, data: Buffer) -> int: + return unpack_uint4(data)[0] + + +class FloatLoader(Loader): + def load(self, data: Buffer) -> float: + # it supports bytes directly + return float(data) + + +class Float4BinaryLoader(Loader): + format = Format.BINARY + + def load(self, data: Buffer) -> float: + return unpack_float4(data)[0] + + +class Float8BinaryLoader(Loader): + format = Format.BINARY + + def load(self, data: Buffer) -> float: + return unpack_float8(data)[0] + + +class NumericLoader(Loader): + def load(self, data: Buffer) -> Decimal: + if isinstance(data, memoryview): + data = bytes(data) + return Decimal(data.decode()) + + +DEC_DIGITS = 4 # decimal digits per Postgres "digit" +NUMERIC_POS = 0x0000 +NUMERIC_NEG = 0x4000 +NUMERIC_NAN = 0xC000 +NUMERIC_PINF = 0xD000 +NUMERIC_NINF = 0xF000 + +_decimal_special = { + NUMERIC_NAN: 
Decimal("NaN"), + NUMERIC_PINF: Decimal("Infinity"), + NUMERIC_NINF: Decimal("-Infinity"), +} + + +class _ContextMap(DefaultDict[int, Context]): + """ + Cache for decimal contexts to use when the precision requires it. + + Note: if the default context is used (prec=28) you can get an invalid + operation or a rounding to 0: + + - Decimal(1000).shift(24) = Decimal('1000000000000000000000000000') + - Decimal(1000).shift(25) = Decimal('0') + - Decimal(1000).shift(30) raises InvalidOperation + """ + + def __missing__(self, key: int) -> Context: + val = Context(prec=key) + self[key] = val + return val + + +_contexts = _ContextMap() +for i in range(DefaultContext.prec): + _contexts[i] = DefaultContext + +_unpack_numeric_head = cast( + Callable[[Buffer], Tuple[int, int, int, int]], + struct.Struct("!HhHH").unpack_from, +) +_pack_numeric_head = cast( + Callable[[int, int, int, int], bytes], + struct.Struct("!HhHH").pack, +) + + +class NumericBinaryLoader(Loader): + format = Format.BINARY + + def load(self, data: Buffer) -> Decimal: + ndigits, weight, sign, dscale = _unpack_numeric_head(data) + if sign == NUMERIC_POS or sign == NUMERIC_NEG: + val = 0 + for i in range(8, len(data), 2): + val = val * 10_000 + data[i] * 0x100 + data[i + 1] + + shift = dscale - (ndigits - weight - 1) * DEC_DIGITS + ctx = _contexts[(weight + 2) * DEC_DIGITS + dscale] + return ( + Decimal(val if sign == NUMERIC_POS else -val) + .scaleb(-dscale, ctx) + .shift(shift, ctx) + ) + else: + try: + return _decimal_special[sign] + except KeyError: + raise e.DataError(f"bad value for numeric sign: 0x{sign:X}") from None + + +NUMERIC_NAN_BIN = _pack_numeric_head(0, 0, NUMERIC_NAN, 0) +NUMERIC_PINF_BIN = _pack_numeric_head(0, 0, NUMERIC_PINF, 0) +NUMERIC_NINF_BIN = _pack_numeric_head(0, 0, NUMERIC_NINF, 0) + + +class DecimalBinaryDumper(Dumper): + format = Format.BINARY + oid = postgres.types["numeric"].oid + + def dump(self, obj: Decimal) -> Buffer: + return dump_decimal_to_numeric_binary(obj) + + +class 
NumericDumper(DecimalDumper): + def dump(self, obj: Union[Decimal, int]) -> bytes: + if isinstance(obj, int): + return str(obj).encode() + else: + return super().dump(obj) + + +class NumericBinaryDumper(Dumper): + format = Format.BINARY + oid = postgres.types["numeric"].oid + + def dump(self, obj: Union[Decimal, int]) -> Buffer: + if isinstance(obj, int): + return dump_int_to_numeric_binary(obj) + else: + return dump_decimal_to_numeric_binary(obj) + + +def dump_decimal_to_numeric_binary(obj: Decimal) -> Union[bytearray, bytes]: + sign, digits, exp = obj.as_tuple() + if exp == "n" or exp == "N": + return NUMERIC_NAN_BIN + elif exp == "F": + return NUMERIC_NINF_BIN if sign else NUMERIC_PINF_BIN + + # Weights of py digits into a pg digit according to their positions. + # Starting with an index wi != 0 is equivalent to prepending 0's to + # the digits tuple, but without really changing it. + weights = (1000, 100, 10, 1) + wi = 0 + + ndigits = nzdigits = len(digits) + + # Find the last nonzero digit + while nzdigits > 0 and digits[nzdigits - 1] == 0: + nzdigits -= 1 + + if exp <= 0: + dscale = -exp + else: + dscale = 0 + # align the py digits to the pg digits if there's some py exponent + ndigits += exp % DEC_DIGITS + + if not nzdigits: + return _pack_numeric_head(0, 0, NUMERIC_POS, dscale) + + # Equivalent of 0-padding left to align the py digits to the pg digits + # but without changing the digits tuple. 
+ mod = (ndigits - dscale) % DEC_DIGITS + if mod: + wi = DEC_DIGITS - mod + ndigits += wi + + tmp = nzdigits + wi + out = bytearray( + _pack_numeric_head( + tmp // DEC_DIGITS + (tmp % DEC_DIGITS and 1), # ndigits + (ndigits + exp) // DEC_DIGITS - 1, # weight + NUMERIC_NEG if sign else NUMERIC_POS, # sign + dscale, + ) + ) + + pgdigit = 0 + for i in range(nzdigits): + pgdigit += weights[wi] * digits[i] + wi += 1 + if wi >= DEC_DIGITS: + out += pack_uint2(pgdigit) + pgdigit = wi = 0 + + if pgdigit: + out += pack_uint2(pgdigit) + + return out + + +def dump_int_to_numeric_binary(obj: int) -> bytearray: + ndigits = int(obj.bit_length() * BIT_PER_PGDIGIT) + 1 + out = bytearray(b"\x00\x00" * (ndigits + 4)) + if obj < 0: + sign = NUMERIC_NEG + obj = -obj + else: + sign = NUMERIC_POS + + out[:8] = _pack_numeric_head(ndigits, ndigits - 1, sign, 0) + i = 8 + (ndigits - 1) * 2 + while obj: + rem = obj % 10_000 + obj //= 10_000 + out[i : i + 2] = pack_uint2(rem) + i -= 2 + + return out + + +def register_default_adapters(context: AdaptContext) -> None: + adapters = context.adapters + adapters.register_dumper(int, IntDumper) + adapters.register_dumper(int, IntBinaryDumper) + adapters.register_dumper(float, FloatDumper) + adapters.register_dumper(float, FloatBinaryDumper) + adapters.register_dumper(Int2, Int2Dumper) + adapters.register_dumper(Int4, Int4Dumper) + adapters.register_dumper(Int8, Int8Dumper) + adapters.register_dumper(IntNumeric, IntNumericDumper) + adapters.register_dumper(Oid, OidDumper) + + # The binary dumper is currently some 30% slower, so default to text + # (see tests/scripts/testdec.py for a rough benchmark) + # Also, must be after IntNumericDumper + adapters.register_dumper("decimal.Decimal", DecimalBinaryDumper) + adapters.register_dumper("decimal.Decimal", DecimalDumper) + + # Used only by oid, can take both int and Decimal as input + adapters.register_dumper(None, NumericBinaryDumper) + adapters.register_dumper(None, NumericDumper) + + 
adapters.register_dumper(Float4, Float4Dumper) + adapters.register_dumper(Float8, FloatDumper) + adapters.register_dumper(Int2, Int2BinaryDumper) + adapters.register_dumper(Int4, Int4BinaryDumper) + adapters.register_dumper(Int8, Int8BinaryDumper) + adapters.register_dumper(Oid, OidBinaryDumper) + adapters.register_dumper(Float4, Float4BinaryDumper) + adapters.register_dumper(Float8, FloatBinaryDumper) + adapters.register_loader("int2", IntLoader) + adapters.register_loader("int4", IntLoader) + adapters.register_loader("int8", IntLoader) + adapters.register_loader("oid", IntLoader) + adapters.register_loader("int2", Int2BinaryLoader) + adapters.register_loader("int4", Int4BinaryLoader) + adapters.register_loader("int8", Int8BinaryLoader) + adapters.register_loader("oid", OidBinaryLoader) + adapters.register_loader("float4", FloatLoader) + adapters.register_loader("float8", FloatLoader) + adapters.register_loader("float4", Float4BinaryLoader) + adapters.register_loader("float8", Float8BinaryLoader) + adapters.register_loader("numeric", NumericLoader) + adapters.register_loader("numeric", NumericBinaryLoader) diff --git a/lib/python3.11/site-packages/psycopg/types/range.py b/lib/python3.11/site-packages/psycopg/types/range.py new file mode 100644 index 0000000..a27d039 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/types/range.py @@ -0,0 +1,698 @@ +""" +Support for range types adaptation. +""" + +# Copyright (C) 2020 The Psycopg Team + +import re +from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar, Type, Tuple +from typing import cast +from decimal import Decimal +from datetime import date, datetime + +from .. import errors as e +from .. 
import postgres +from ..pq import Format +from ..abc import AdaptContext, Buffer, Dumper, DumperKey +from ..adapt import RecursiveDumper, RecursiveLoader, PyFormat +from .._struct import pack_len, unpack_len +from ..postgres import INVALID_OID, TEXT_OID +from .._typeinfo import RangeInfo as RangeInfo # exported here + +RANGE_EMPTY = 0x01 # range is empty +RANGE_LB_INC = 0x02 # lower bound is inclusive +RANGE_UB_INC = 0x04 # upper bound is inclusive +RANGE_LB_INF = 0x08 # lower bound is -infinity +RANGE_UB_INF = 0x10 # upper bound is +infinity + +_EMPTY_HEAD = bytes([RANGE_EMPTY]) + +T = TypeVar("T") + + +class Range(Generic[T]): + """Python representation for a PostgreSQL range type. + + :param lower: lower bound for the range. `!None` means unbound + :param upper: upper bound for the range. `!None` means unbound + :param bounds: one of the literal strings ``()``, ``[)``, ``(]``, ``[]``, + representing whether the lower or upper bounds are included + :param empty: if `!True`, the range is empty + + """ + + __slots__ = ("_lower", "_upper", "_bounds") + + def __init__( + self, + lower: Optional[T] = None, + upper: Optional[T] = None, + bounds: str = "[)", + empty: bool = False, + ): + if not empty: + if bounds not in ("[)", "(]", "()", "[]"): + raise ValueError("bound flags not valid: %r" % bounds) + + self._lower = lower + self._upper = upper + + # Make bounds consistent with infs + if lower is None and bounds[0] == "[": + bounds = "(" + bounds[1] + if upper is None and bounds[1] == "]": + bounds = bounds[0] + ")" + + self._bounds = bounds + else: + self._lower = self._upper = None + self._bounds = "" + + def __repr__(self) -> str: + if self._bounds: + args = f"{self._lower!r}, {self._upper!r}, {self._bounds!r}" + else: + args = "empty=True" + + return f"{self.__class__.__name__}({args})" + + def __str__(self) -> str: + if not self._bounds: + return "empty" + + items = [ + self._bounds[0], + str(self._lower), + ", ", + str(self._upper), + self._bounds[1], + ] + 
return "".join(items) + + @property + def lower(self) -> Optional[T]: + """The lower bound of the range. `!None` if empty or unbound.""" + return self._lower + + @property + def upper(self) -> Optional[T]: + """The upper bound of the range. `!None` if empty or unbound.""" + return self._upper + + @property + def bounds(self) -> str: + """The bounds string (two characters from '[', '(', ']', ')').""" + return self._bounds + + @property + def isempty(self) -> bool: + """`!True` if the range is empty.""" + return not self._bounds + + @property + def lower_inf(self) -> bool: + """`!True` if the range doesn't have a lower bound.""" + if not self._bounds: + return False + return self._lower is None + + @property + def upper_inf(self) -> bool: + """`!True` if the range doesn't have an upper bound.""" + if not self._bounds: + return False + return self._upper is None + + @property + def lower_inc(self) -> bool: + """`!True` if the lower bound is included in the range.""" + if not self._bounds or self._lower is None: + return False + return self._bounds[0] == "[" + + @property + def upper_inc(self) -> bool: + """`!True` if the upper bound is included in the range.""" + if not self._bounds or self._upper is None: + return False + return self._bounds[1] == "]" + + def __contains__(self, x: T) -> bool: + if not self._bounds: + return False + + if self._lower is not None: + if self._bounds[0] == "[": + # It doesn't seem that Python has an ABC for ordered types. 
+ if x < self._lower: # type: ignore[operator] + return False + else: + if x <= self._lower: # type: ignore[operator] + return False + + if self._upper is not None: + if self._bounds[1] == "]": + if x > self._upper: # type: ignore[operator] + return False + else: + if x >= self._upper: # type: ignore[operator] + return False + + return True + + def __bool__(self) -> bool: + return bool(self._bounds) + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, Range): + return False + return ( + self._lower == other._lower + and self._upper == other._upper + and self._bounds == other._bounds + ) + + def __hash__(self) -> int: + return hash((self._lower, self._upper, self._bounds)) + + # as the postgres docs describe for the server-side stuff, + # ordering is rather arbitrary, but will remain stable + # and consistent. + + def __lt__(self, other: Any) -> bool: + if not isinstance(other, Range): + return NotImplemented + for attr in ("_lower", "_upper", "_bounds"): + self_value = getattr(self, attr) + other_value = getattr(other, attr) + if self_value == other_value: + pass + elif self_value is None: + return True + elif other_value is None: + return False + else: + return cast(bool, self_value < other_value) + return False + + def __le__(self, other: Any) -> bool: + return self == other or self < other # type: ignore + + def __gt__(self, other: Any) -> bool: + if isinstance(other, Range): + return other < self + else: + return NotImplemented + + def __ge__(self, other: Any) -> bool: + return self == other or self > other # type: ignore + + def __getstate__(self) -> Dict[str, Any]: + return { + slot: getattr(self, slot) for slot in self.__slots__ if hasattr(self, slot) + } + + def __setstate__(self, state: Dict[str, Any]) -> None: + for slot, value in state.items(): + setattr(self, slot, value) + + +# Subclasses to specify a specific subtype. Usually not needed: only needed +# in binary copy, where switching to text is not an option. 
+ + +class Int4Range(Range[int]): + pass + + +class Int8Range(Range[int]): + pass + + +class NumericRange(Range[Decimal]): + pass + + +class DateRange(Range[date]): + pass + + +class TimestampRange(Range[datetime]): + pass + + +class TimestamptzRange(Range[datetime]): + pass + + +class BaseRangeDumper(RecursiveDumper): + def __init__(self, cls: type, context: Optional[AdaptContext] = None): + super().__init__(cls, context) + self.sub_dumper: Optional[Dumper] = None + self._adapt_format = PyFormat.from_pq(self.format) + + def get_key(self, obj: Range[Any], format: PyFormat) -> DumperKey: + # If we are a subclass whose oid is specified we don't need upgrade + if self.cls is not Range: + return self.cls + + item = self._get_item(obj) + if item is not None: + sd = self._tx.get_dumper(item, self._adapt_format) + return (self.cls, sd.get_key(item, format)) + else: + return (self.cls,) + + def upgrade(self, obj: Range[Any], format: PyFormat) -> "BaseRangeDumper": + # If we are a subclass whose oid is specified we don't need upgrade + if self.cls is not Range: + return self + + item = self._get_item(obj) + if item is None: + return RangeDumper(self.cls) + + dumper: BaseRangeDumper + if type(item) is int: + # postgres won't cast int4range -> int8range so we must use + # text format and unknown oid here + sd = self._tx.get_dumper(item, PyFormat.TEXT) + dumper = RangeDumper(self.cls, self._tx) + dumper.sub_dumper = sd + dumper.oid = INVALID_OID + return dumper + + sd = self._tx.get_dumper(item, format) + dumper = type(self)(self.cls, self._tx) + dumper.sub_dumper = sd + if sd.oid == INVALID_OID and isinstance(item, str): + # Work around the normal mapping where text is dumped as unknown + dumper.oid = self._get_range_oid(TEXT_OID) + else: + dumper.oid = self._get_range_oid(sd.oid) + + return dumper + + def _get_item(self, obj: Range[Any]) -> Any: + """ + Return a member representative of the range + """ + rv = obj.lower + return rv if rv is not None else obj.upper + + def 
_get_range_oid(self, sub_oid: int) -> int: + """ + Return the oid of the range from the oid of its elements. + """ + info = self._tx.adapters.types.get_by_subtype(RangeInfo, sub_oid) + return info.oid if info else INVALID_OID + + +class RangeDumper(BaseRangeDumper): + """ + Dumper for range types. + + The dumper can upgrade to one specific for a different range type. + """ + + def dump(self, obj: Range[Any]) -> Buffer: + item = self._get_item(obj) + if item is not None: + dump = self._tx.get_dumper(item, self._adapt_format).dump + else: + dump = fail_dump + + return dump_range_text(obj, dump) + + +def dump_range_text(obj: Range[Any], dump: Callable[[Any], Buffer]) -> Buffer: + if obj.isempty: + return b"empty" + + parts: List[Buffer] = [b"[" if obj.lower_inc else b"("] + + def dump_item(item: Any) -> Buffer: + ad = dump(item) + if not ad: + return b'""' + elif _re_needs_quotes.search(ad): + return b'"' + _re_esc.sub(rb"\1\1", ad) + b'"' + else: + return ad + + if obj.lower is not None: + parts.append(dump_item(obj.lower)) + + parts.append(b",") + + if obj.upper is not None: + parts.append(dump_item(obj.upper)) + + parts.append(b"]" if obj.upper_inc else b")") + + return b"".join(parts) + + +_re_needs_quotes = re.compile(rb'[",\\\s()\[\]]') +_re_esc = re.compile(rb"([\\\"])") + + +class RangeBinaryDumper(BaseRangeDumper): + format = Format.BINARY + + def dump(self, obj: Range[Any]) -> Buffer: + item = self._get_item(obj) + if item is not None: + dump = self._tx.get_dumper(item, self._adapt_format).dump + else: + dump = fail_dump + + return dump_range_binary(obj, dump) + + +def dump_range_binary(obj: Range[Any], dump: Callable[[Any], Buffer]) -> Buffer: + if not obj: + return _EMPTY_HEAD + + out = bytearray([0]) # will replace the head later + + head = 0 + if obj.lower_inc: + head |= RANGE_LB_INC + if obj.upper_inc: + head |= RANGE_UB_INC + + if obj.lower is not None: + data = dump(obj.lower) + out += pack_len(len(data)) + out += data + else: + head |= RANGE_LB_INF + 
+ if obj.upper is not None: + data = dump(obj.upper) + out += pack_len(len(data)) + out += data + else: + head |= RANGE_UB_INF + + out[0] = head + return out + + +def fail_dump(obj: Any) -> Buffer: + raise e.InternalError("trying to dump a range element without information") + + +class BaseRangeLoader(RecursiveLoader, Generic[T]): + """Generic loader for a range. + + Subclasses must specify the oid of the subtype and the class to load. + """ + + subtype_oid: int + + def __init__(self, oid: int, context: Optional[AdaptContext] = None): + super().__init__(oid, context) + self._load = self._tx.get_loader(self.subtype_oid, format=self.format).load + + +class RangeLoader(BaseRangeLoader[T]): + def load(self, data: Buffer) -> Range[T]: + return load_range_text(data, self._load)[0] + + +def load_range_text( + data: Buffer, load: Callable[[Buffer], Any] +) -> Tuple[Range[Any], int]: + if data == b"empty": + return Range(empty=True), 5 + + m = _re_range.match(data) + if m is None: + raise e.DataError( + f"failed to parse range: '{bytes(data).decode('utf8', 'replace')}'" + ) + + lower = None + item = m.group(3) + if item is None: + item = m.group(2) + if item is not None: + lower = load(_re_undouble.sub(rb"\1", item)) + else: + lower = load(item) + + upper = None + item = m.group(5) + if item is None: + item = m.group(4) + if item is not None: + upper = load(_re_undouble.sub(rb"\1", item)) + else: + upper = load(item) + + bounds = (m.group(1) + m.group(6)).decode() + + return Range(lower, upper, bounds), m.end() + + +_re_range = re.compile( + rb""" + ( \(|\[ ) # lower bound flag + (?: # lower bound: + " ( (?: [^"] | "")* ) " # - a quoted string + | ( [^",]+ ) # - or an unquoted string + )? # - or empty (not caught) + , + (?: # upper bound: + " ( (?: [^"] | "")* ) " # - a quoted string + | ( [^"\)\]]+ ) # - or an unquoted string + )? 
# - or empty (not caught) + ( \)|\] ) # upper bound flag + """, + re.VERBOSE, +) + +_re_undouble = re.compile(rb'(["\\])\1') + + +class RangeBinaryLoader(BaseRangeLoader[T]): + format = Format.BINARY + + def load(self, data: Buffer) -> Range[T]: + return load_range_binary(data, self._load) + + +def load_range_binary(data: Buffer, load: Callable[[Buffer], Any]) -> Range[Any]: + head = data[0] + if head & RANGE_EMPTY: + return Range(empty=True) + + lb = "[" if head & RANGE_LB_INC else "(" + ub = "]" if head & RANGE_UB_INC else ")" + + pos = 1 # after the head + if head & RANGE_LB_INF: + min = None + else: + length = unpack_len(data, pos)[0] + pos += 4 + min = load(data[pos : pos + length]) + pos += length + + if head & RANGE_UB_INF: + max = None + else: + length = unpack_len(data, pos)[0] + pos += 4 + max = load(data[pos : pos + length]) + pos += length + + return Range(min, max, lb + ub) + + +def register_range(info: RangeInfo, context: Optional[AdaptContext] = None) -> None: + """Register the adapters to load and dump a range type. + + :param info: The object with the information about the range to register. + :param context: The context where to register the adapters. If `!None`, + register it globally. + + Register loaders so that loading data of this type will result in a `Range` + with bounds parsed as the right subtype. + + .. note:: + + Registering the adapters doesn't affect objects already created, even + if they are children of the registered context. For instance, + registering the adapter globally doesn't affect already existing + connections. + """ + # A friendly error warning instead of an AttributeError in case fetch() + # failed and it wasn't noticed. + if not info: + raise TypeError("no info passed. 
Is the requested range available?") + + # Register arrays and type info + info.register(context) + + adapters = context.adapters if context else postgres.adapters + + # generate and register a customized text loader + loader: Type[RangeLoader[Any]] = type( + f"{info.name.title()}Loader", + (RangeLoader,), + {"subtype_oid": info.subtype_oid}, + ) + adapters.register_loader(info.oid, loader) + + # generate and register a customized binary loader + bloader: Type[RangeBinaryLoader[Any]] = type( + f"{info.name.title()}BinaryLoader", + (RangeBinaryLoader,), + {"subtype_oid": info.subtype_oid}, + ) + adapters.register_loader(info.oid, bloader) + + +# Text dumpers for builtin range types wrappers +# These are registered on specific subtypes so that the upgrade mechanism +# doesn't kick in. + + +class Int4RangeDumper(RangeDumper): + oid = postgres.types["int4range"].oid + + +class Int8RangeDumper(RangeDumper): + oid = postgres.types["int8range"].oid + + +class NumericRangeDumper(RangeDumper): + oid = postgres.types["numrange"].oid + + +class DateRangeDumper(RangeDumper): + oid = postgres.types["daterange"].oid + + +class TimestampRangeDumper(RangeDumper): + oid = postgres.types["tsrange"].oid + + +class TimestamptzRangeDumper(RangeDumper): + oid = postgres.types["tstzrange"].oid + + +# Binary dumpers for builtin range types wrappers +# These are registered on specific subtypes so that the upgrade mechanism +# doesn't kick in. 
+ + +class Int4RangeBinaryDumper(RangeBinaryDumper): + oid = postgres.types["int4range"].oid + + +class Int8RangeBinaryDumper(RangeBinaryDumper): + oid = postgres.types["int8range"].oid + + +class NumericRangeBinaryDumper(RangeBinaryDumper): + oid = postgres.types["numrange"].oid + + +class DateRangeBinaryDumper(RangeBinaryDumper): + oid = postgres.types["daterange"].oid + + +class TimestampRangeBinaryDumper(RangeBinaryDumper): + oid = postgres.types["tsrange"].oid + + +class TimestamptzRangeBinaryDumper(RangeBinaryDumper): + oid = postgres.types["tstzrange"].oid + + +# Text loaders for builtin range types + + +class Int4RangeLoader(RangeLoader[int]): + subtype_oid = postgres.types["int4"].oid + + +class Int8RangeLoader(RangeLoader[int]): + subtype_oid = postgres.types["int8"].oid + + +class NumericRangeLoader(RangeLoader[Decimal]): + subtype_oid = postgres.types["numeric"].oid + + +class DateRangeLoader(RangeLoader[date]): + subtype_oid = postgres.types["date"].oid + + +class TimestampRangeLoader(RangeLoader[datetime]): + subtype_oid = postgres.types["timestamp"].oid + + +class TimestampTZRangeLoader(RangeLoader[datetime]): + subtype_oid = postgres.types["timestamptz"].oid + + +# Binary loaders for builtin range types + + +class Int4RangeBinaryLoader(RangeBinaryLoader[int]): + subtype_oid = postgres.types["int4"].oid + + +class Int8RangeBinaryLoader(RangeBinaryLoader[int]): + subtype_oid = postgres.types["int8"].oid + + +class NumericRangeBinaryLoader(RangeBinaryLoader[Decimal]): + subtype_oid = postgres.types["numeric"].oid + + +class DateRangeBinaryLoader(RangeBinaryLoader[date]): + subtype_oid = postgres.types["date"].oid + + +class TimestampRangeBinaryLoader(RangeBinaryLoader[datetime]): + subtype_oid = postgres.types["timestamp"].oid + + +class TimestampTZRangeBinaryLoader(RangeBinaryLoader[datetime]): + subtype_oid = postgres.types["timestamptz"].oid + + +def register_default_adapters(context: AdaptContext) -> None: + adapters = context.adapters + 
adapters.register_dumper(Range, RangeBinaryDumper) + adapters.register_dumper(Range, RangeDumper) + adapters.register_dumper(Int4Range, Int4RangeDumper) + adapters.register_dumper(Int8Range, Int8RangeDumper) + adapters.register_dumper(NumericRange, NumericRangeDumper) + adapters.register_dumper(DateRange, DateRangeDumper) + adapters.register_dumper(TimestampRange, TimestampRangeDumper) + adapters.register_dumper(TimestamptzRange, TimestamptzRangeDumper) + adapters.register_dumper(Int4Range, Int4RangeBinaryDumper) + adapters.register_dumper(Int8Range, Int8RangeBinaryDumper) + adapters.register_dumper(NumericRange, NumericRangeBinaryDumper) + adapters.register_dumper(DateRange, DateRangeBinaryDumper) + adapters.register_dumper(TimestampRange, TimestampRangeBinaryDumper) + adapters.register_dumper(TimestamptzRange, TimestamptzRangeBinaryDumper) + adapters.register_loader("int4range", Int4RangeLoader) + adapters.register_loader("int8range", Int8RangeLoader) + adapters.register_loader("numrange", NumericRangeLoader) + adapters.register_loader("daterange", DateRangeLoader) + adapters.register_loader("tsrange", TimestampRangeLoader) + adapters.register_loader("tstzrange", TimestampTZRangeLoader) + adapters.register_loader("int4range", Int4RangeBinaryLoader) + adapters.register_loader("int8range", Int8RangeBinaryLoader) + adapters.register_loader("numrange", NumericRangeBinaryLoader) + adapters.register_loader("daterange", DateRangeBinaryLoader) + adapters.register_loader("tsrange", TimestampRangeBinaryLoader) + adapters.register_loader("tstzrange", TimestampTZRangeBinaryLoader) diff --git a/lib/python3.11/site-packages/psycopg/types/shapely.py b/lib/python3.11/site-packages/psycopg/types/shapely.py new file mode 100644 index 0000000..e99f256 --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/types/shapely.py @@ -0,0 +1,75 @@ +""" +Adapters for PostGIS geometries +""" + +from typing import Optional + +from .. 
import postgres +from ..abc import AdaptContext, Buffer +from ..adapt import Dumper, Loader +from ..pq import Format +from .._typeinfo import TypeInfo + + +try: + from shapely.wkb import loads, dumps + from shapely.geometry.base import BaseGeometry + +except ImportError: + raise ImportError( + "The module psycopg.types.shapely requires the package 'Shapely'" + " to be installed" + ) + + +class GeometryBinaryLoader(Loader): + format = Format.BINARY + + def load(self, data: Buffer) -> "BaseGeometry": + if not isinstance(data, bytes): + data = bytes(data) + return loads(data) + + +class GeometryLoader(Loader): + def load(self, data: Buffer) -> "BaseGeometry": + # it's a hex string in binary + if isinstance(data, memoryview): + data = bytes(data) + return loads(data.decode(), hex=True) + + +class BaseGeometryBinaryDumper(Dumper): + format = Format.BINARY + + def dump(self, obj: "BaseGeometry") -> bytes: + return dumps(obj) # type: ignore + + +class BaseGeometryDumper(Dumper): + def dump(self, obj: "BaseGeometry") -> bytes: + return dumps(obj, hex=True).encode() # type: ignore + + +def register_shapely(info: TypeInfo, context: Optional[AdaptContext] = None) -> None: + """Register Shapely dumper and loaders.""" + + # A friendly error warning instead of an AttributeError in case fetch() + # failed and it wasn't noticed. + if not info: + raise TypeError("no info passed. 
Is the 'postgis' extension loaded?") + + info.register(context) + adapters = context.adapters if context else postgres.adapters + + class GeometryDumper(BaseGeometryDumper): + oid = info.oid + + class GeometryBinaryDumper(BaseGeometryBinaryDumper): + oid = info.oid + + adapters.register_loader(info.oid, GeometryBinaryLoader) + adapters.register_loader(info.oid, GeometryLoader) + # Default binary dump + adapters.register_dumper(BaseGeometry, GeometryDumper) + adapters.register_dumper(BaseGeometry, GeometryBinaryDumper) diff --git a/lib/python3.11/site-packages/psycopg/types/string.py b/lib/python3.11/site-packages/psycopg/types/string.py new file mode 100644 index 0000000..22f827e --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/types/string.py @@ -0,0 +1,229 @@ +""" +Adapters for textual types. +""" + +# Copyright (C) 2020 The Psycopg Team + +from typing import Optional, Union, TYPE_CHECKING + +from .. import postgres +from ..pq import Format, Escaping +from ..abc import AdaptContext +from ..adapt import Buffer, Dumper, Loader +from ..errors import DataError +from .._encodings import conn_encoding + +if TYPE_CHECKING: + from ..pq.abc import Escaping as EscapingProto + + +class _BaseStrDumper(Dumper): + def __init__(self, cls: type, context: Optional[AdaptContext] = None): + super().__init__(cls, context) + enc = conn_encoding(self.connection) + self._encoding = enc if enc != "ascii" else "utf-8" + + +class _StrBinaryDumper(_BaseStrDumper): + """ + Base class to dump a Python strings to a Postgres text type, in binary format. + + Subclasses shall specify the oids of real types (text, varchar, name...). + """ + + format = Format.BINARY + + def dump(self, obj: str) -> bytes: + # the server will raise DataError subclass if the string contains 0x00 + return obj.encode(self._encoding) + + +class _StrDumper(_BaseStrDumper): + """ + Base class to dump a Python strings to a Postgres text type, in text format. 
+ + Subclasses shall specify the oids of real types (text, varchar, name...). + """ + + def dump(self, obj: str) -> bytes: + if "\x00" in obj: + raise DataError("PostgreSQL text fields cannot contain NUL (0x00) bytes") + else: + return obj.encode(self._encoding) + + +# The next are concrete dumpers, each one specifying the oid they dump to. + + +class StrBinaryDumper(_StrBinaryDumper): + oid = postgres.types["text"].oid + + +class StrBinaryDumperVarchar(_StrBinaryDumper): + oid = postgres.types["varchar"].oid + + +class StrBinaryDumperName(_StrBinaryDumper): + oid = postgres.types["name"].oid + + +class StrDumper(_StrDumper): + """ + Dumper for strings in text format to the text oid. + + Note that this dumper is not used by default because the type is too strict + and PostgreSQL would require an explicit casts to everything that is not a + text field. However it is useful where the unknown oid is ambiguous and the + text oid is required, for instance with variadic functions. + """ + + oid = postgres.types["text"].oid + + +class StrDumperVarchar(_StrDumper): + oid = postgres.types["varchar"].oid + + +class StrDumperName(_StrDumper): + oid = postgres.types["name"].oid + + +class StrDumperUnknown(_StrDumper): + """ + Dumper for strings in text format to the unknown oid. + + This dumper is the default dumper for strings and allows to use Python + strings to represent almost every data type. In a few places, however, the + unknown oid is not accepted (for instance in variadic functions such as + 'concat()'). In that case either a cast on the placeholder ('%s::text') or + the StrTextDumper should be used. 
+ """ + + pass + + +class TextLoader(Loader): + def __init__(self, oid: int, context: Optional[AdaptContext] = None): + super().__init__(oid, context) + enc = conn_encoding(self.connection) + self._encoding = enc if enc != "ascii" else "" + + def load(self, data: Buffer) -> Union[bytes, str]: + if self._encoding: + if isinstance(data, memoryview): + data = bytes(data) + return data.decode(self._encoding) + else: + # return bytes for SQL_ASCII db + if not isinstance(data, bytes): + data = bytes(data) + return data + + +class TextBinaryLoader(TextLoader): + format = Format.BINARY + + +class BytesDumper(Dumper): + oid = postgres.types["bytea"].oid + _qprefix = b"" + + def __init__(self, cls: type, context: Optional[AdaptContext] = None): + super().__init__(cls, context) + self._esc = Escaping(self.connection.pgconn if self.connection else None) + + def dump(self, obj: Buffer) -> Buffer: + return self._esc.escape_bytea(obj) + + def quote(self, obj: Buffer) -> bytes: + escaped = self.dump(obj) + + # We cannot use the base quoting because escape_bytea already returns + # the quotes content. if scs is off it will escape the backslashes in + # the format, otherwise it won't, but it doesn't tell us what quotes to + # use. + if self.connection: + if not self._qprefix: + scs = self.connection.pgconn.parameter_status( + b"standard_conforming_strings" + ) + self._qprefix = b"'" if scs == b"on" else b" E'" + + return self._qprefix + escaped + b"'" + + # We don't have a connection, so someone is using us to generate a file + # to use off-line or something like that. PQescapeBytea, like its + # string counterpart, is not predictable whether it will escape + # backslashes. 
+ rv: bytes = b" E'" + escaped + b"'" + if self._esc.escape_bytea(b"\x00") == b"\\000": + rv = rv.replace(b"\\", b"\\\\") + return rv + + +class BytesBinaryDumper(Dumper): + format = Format.BINARY + oid = postgres.types["bytea"].oid + + def dump(self, obj: Buffer) -> Buffer: + return obj + + +class ByteaLoader(Loader): + _escaping: "EscapingProto" + + def __init__(self, oid: int, context: Optional[AdaptContext] = None): + super().__init__(oid, context) + if not hasattr(self.__class__, "_escaping"): + self.__class__._escaping = Escaping() + + def load(self, data: Buffer) -> bytes: + return self._escaping.unescape_bytea(data) + + +class ByteaBinaryLoader(Loader): + format = Format.BINARY + + def load(self, data: Buffer) -> Buffer: + return data + + +def register_default_adapters(context: AdaptContext) -> None: + adapters = context.adapters + + # NOTE: the order the dumpers are registered is relevant. The last one + # registered becomes the default for each type. Usually, binary is the + # default dumper. For text we use the text dumper as default because it + # plays the role of unknown, and it can be cast automatically to other + # types. However, before that, we register dumper with 'text', 'varchar', + # 'name' oids, which will be used when a text dumper is looked up by oid. 
+ adapters.register_dumper(str, StrBinaryDumperName) + adapters.register_dumper(str, StrBinaryDumperVarchar) + adapters.register_dumper(str, StrBinaryDumper) + adapters.register_dumper(str, StrDumperName) + adapters.register_dumper(str, StrDumperVarchar) + adapters.register_dumper(str, StrDumper) + adapters.register_dumper(str, StrDumperUnknown) + + adapters.register_loader(postgres.INVALID_OID, TextLoader) + adapters.register_loader("bpchar", TextLoader) + adapters.register_loader("name", TextLoader) + adapters.register_loader("text", TextLoader) + adapters.register_loader("varchar", TextLoader) + adapters.register_loader('"char"', TextLoader) + adapters.register_loader("bpchar", TextBinaryLoader) + adapters.register_loader("name", TextBinaryLoader) + adapters.register_loader("text", TextBinaryLoader) + adapters.register_loader("varchar", TextBinaryLoader) + adapters.register_loader('"char"', TextBinaryLoader) + + adapters.register_dumper(bytes, BytesDumper) + adapters.register_dumper(bytearray, BytesDumper) + adapters.register_dumper(memoryview, BytesDumper) + adapters.register_dumper(bytes, BytesBinaryDumper) + adapters.register_dumper(bytearray, BytesBinaryDumper) + adapters.register_dumper(memoryview, BytesBinaryDumper) + + adapters.register_loader("bytea", ByteaLoader) + adapters.register_loader(postgres.INVALID_OID, ByteaBinaryLoader) + adapters.register_loader("bytea", ByteaBinaryLoader) diff --git a/lib/python3.11/site-packages/psycopg/types/uuid.py b/lib/python3.11/site-packages/psycopg/types/uuid.py new file mode 100644 index 0000000..3cc5eba --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/types/uuid.py @@ -0,0 +1,62 @@ +""" +Adapters for the UUID type. +""" + +# Copyright (C) 2020 The Psycopg Team + +from typing import Callable, Optional, TYPE_CHECKING + +from .. 
import postgres +from ..pq import Format +from ..abc import AdaptContext +from ..adapt import Buffer, Dumper, Loader + +if TYPE_CHECKING: + import uuid + +# Importing the uuid module is slow, so import it only on request. +UUID: Callable[..., "uuid.UUID"] = None # type: ignore[assignment] + + +class UUIDDumper(Dumper): + oid = postgres.types["uuid"].oid + + def dump(self, obj: "uuid.UUID") -> bytes: + return obj.hex.encode() + + +class UUIDBinaryDumper(UUIDDumper): + format = Format.BINARY + + def dump(self, obj: "uuid.UUID") -> bytes: + return obj.bytes + + +class UUIDLoader(Loader): + def __init__(self, oid: int, context: Optional[AdaptContext] = None): + super().__init__(oid, context) + global UUID + if UUID is None: + from uuid import UUID + + def load(self, data: Buffer) -> "uuid.UUID": + if isinstance(data, memoryview): + data = bytes(data) + return UUID(data.decode()) + + +class UUIDBinaryLoader(UUIDLoader): + format = Format.BINARY + + def load(self, data: Buffer) -> "uuid.UUID": + if isinstance(data, memoryview): + data = bytes(data) + return UUID(bytes=data) + + +def register_default_adapters(context: AdaptContext) -> None: + adapters = context.adapters + adapters.register_dumper("uuid.UUID", UUIDDumper) + adapters.register_dumper("uuid.UUID", UUIDBinaryDumper) + adapters.register_loader("uuid", UUIDLoader) + adapters.register_loader("uuid", UUIDBinaryLoader) diff --git a/lib/python3.11/site-packages/psycopg/version.py b/lib/python3.11/site-packages/psycopg/version.py new file mode 100644 index 0000000..4f223ee --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/version.py @@ -0,0 +1,14 @@ +""" +psycopg distribution version file. +""" + +# Copyright (C) 2020 The Psycopg Team + +# Use a versioning scheme as defined in +# https://www.python.org/dev/peps/pep-0440/ + +# STOP AND READ! 
if you change: +__version__ = "3.1.9" +# also change: +# - `docs/news.rst` to declare this as the current version or an unreleased one +# - `psycopg_c/psycopg_c/version.py` to the same version. diff --git a/lib/python3.11/site-packages/psycopg/waiting.py b/lib/python3.11/site-packages/psycopg/waiting.py new file mode 100644 index 0000000..2c4a10f --- /dev/null +++ b/lib/python3.11/site-packages/psycopg/waiting.py @@ -0,0 +1,324 @@ +""" +Code concerned with waiting in different contexts (blocking, async, etc). + +These functions are designed to consume the generators returned by the +`generators` module function and to return their final value. + +""" + +# Copyright (C) 2020 The Psycopg Team + + +import os +import select +import selectors +from typing import Dict, Optional +from asyncio import get_event_loop, wait_for, Event, TimeoutError +from selectors import DefaultSelector + +from . import errors as e +from .abc import RV, PQGen, PQGenConn, WaitFunc +from ._enums import Wait as Wait, Ready as Ready # re-exported +from ._cmodule import _psycopg + +WAIT_R = Wait.R +WAIT_W = Wait.W +WAIT_RW = Wait.RW +READY_R = Ready.R +READY_W = Ready.W +READY_RW = Ready.RW + + +def wait_selector(gen: PQGen[RV], fileno: int, timeout: Optional[float] = None) -> RV: + """ + Wait for a generator using the best strategy available. + + :param gen: a generator performing database operations and yielding + `Ready` values when it would block. + :param fileno: the file descriptor to wait on. + :param timeout: timeout (in seconds) to check for other interrupt, e.g. + to allow Ctrl-C. + :type timeout: float + :return: whatever `!gen` returns on completion. + + Consume `!gen`, scheduling `fileno` for completion when it is reported to + block. Once ready again send the ready state back to `!gen`. 
+ """ + try: + s = next(gen) + with DefaultSelector() as sel: + while True: + sel.register(fileno, s) + rlist = None + while not rlist: + rlist = sel.select(timeout=timeout) + sel.unregister(fileno) + # note: this line should require a cast, but mypy doesn't complain + ready: Ready = rlist[0][1] + assert s & ready + s = gen.send(ready) + + except StopIteration as ex: + rv: RV = ex.args[0] if ex.args else None + return rv + + +def wait_conn(gen: PQGenConn[RV], timeout: Optional[float] = None) -> RV: + """ + Wait for a connection generator using the best strategy available. + + :param gen: a generator performing database operations and yielding + (fd, `Ready`) pairs when it would block. + :param timeout: timeout (in seconds) to check for other interrupt, e.g. + to allow Ctrl-C. If zero or None, wait indefinitely. + :type timeout: float + :return: whatever `!gen` returns on completion. + + Behave like in `wait()`, but take the fileno to wait from the generator + itself, which might change during processing. + """ + try: + fileno, s = next(gen) + if not timeout: + timeout = None + with DefaultSelector() as sel: + while True: + sel.register(fileno, s) + rlist = sel.select(timeout=timeout) + sel.unregister(fileno) + if not rlist: + raise e.ConnectionTimeout("connection timeout expired") + ready: Ready = rlist[0][1] # type: ignore[assignment] + fileno, s = gen.send(ready) + + except StopIteration as ex: + rv: RV = ex.args[0] if ex.args else None + return rv + + +async def wait_async(gen: PQGen[RV], fileno: int) -> RV: + """ + Coroutine waiting for a generator to complete. + + :param gen: a generator performing database operations and yielding + `Ready` values when it would block. + :param fileno: the file descriptor to wait on. + :return: whatever `!gen` returns on completion. + + Behave like in `wait()`, but exposing an `asyncio` interface. + """ + # Use an event to block and restart after the fd state changes. 
+ # Not sure this is the best implementation but it's a start. + ev = Event() + loop = get_event_loop() + ready: Ready + s: Wait + + def wakeup(state: Ready) -> None: + nonlocal ready + ready |= state # type: ignore[assignment] + ev.set() + + try: + s = next(gen) + while True: + reader = s & WAIT_R + writer = s & WAIT_W + if not reader and not writer: + raise e.InternalError(f"bad poll status: {s}") + ev.clear() + ready = 0 # type: ignore[assignment] + if reader: + loop.add_reader(fileno, wakeup, READY_R) + if writer: + loop.add_writer(fileno, wakeup, READY_W) + try: + await ev.wait() + finally: + if reader: + loop.remove_reader(fileno) + if writer: + loop.remove_writer(fileno) + s = gen.send(ready) + + except StopIteration as ex: + rv: RV = ex.args[0] if ex.args else None + return rv + + +async def wait_conn_async(gen: PQGenConn[RV], timeout: Optional[float] = None) -> RV: + """ + Coroutine waiting for a connection generator to complete. + + :param gen: a generator performing database operations and yielding + (fd, `Ready`) pairs when it would block. + :param timeout: timeout (in seconds) to check for other interrupt, e.g. + to allow Ctrl-C. If zero or None, wait indefinitely. + :return: whatever `!gen` returns on completion. + + Behave like in `wait()`, but take the fileno to wait from the generator + itself, which might change during processing. + """ + # Use an event to block and restart after the fd state changes. + # Not sure this is the best implementation but it's a start. 
+ ev = Event() + loop = get_event_loop() + ready: Ready + s: Wait + + def wakeup(state: Ready) -> None: + nonlocal ready + ready = state + ev.set() + + try: + fileno, s = next(gen) + if not timeout: + timeout = None + while True: + reader = s & WAIT_R + writer = s & WAIT_W + if not reader and not writer: + raise e.InternalError(f"bad poll status: {s}") + ev.clear() + ready = 0 # type: ignore[assignment] + if reader: + loop.add_reader(fileno, wakeup, READY_R) + if writer: + loop.add_writer(fileno, wakeup, READY_W) + try: + await wait_for(ev.wait(), timeout) + finally: + if reader: + loop.remove_reader(fileno) + if writer: + loop.remove_writer(fileno) + fileno, s = gen.send(ready) + + except TimeoutError: + raise e.ConnectionTimeout("connection timeout expired") + + except StopIteration as ex: + rv: RV = ex.args[0] if ex.args else None + return rv + + +# Specialised implementation of wait functions. + + +def wait_select(gen: PQGen[RV], fileno: int, timeout: Optional[float] = None) -> RV: + """ + Wait for a generator using select where supported. + """ + try: + s = next(gen) + + empty = () + fnlist = (fileno,) + while True: + rl, wl, xl = select.select( + fnlist if s & WAIT_R else empty, + fnlist if s & WAIT_W else empty, + fnlist, + timeout, + ) + ready = 0 + if rl: + ready = READY_R + if wl: + ready |= READY_W + if not ready: + continue + # assert s & ready + s = gen.send(ready) # type: ignore + + except StopIteration as ex: + rv: RV = ex.args[0] if ex.args else None + return rv + + +poll_evmasks: Dict[Wait, int] + +if hasattr(selectors, "EpollSelector"): + poll_evmasks = { + WAIT_R: select.EPOLLONESHOT | select.EPOLLIN, + WAIT_W: select.EPOLLONESHOT | select.EPOLLOUT, + WAIT_RW: select.EPOLLONESHOT | select.EPOLLIN | select.EPOLLOUT, + } +else: + poll_evmasks = {} + + +def wait_epoll(gen: PQGen[RV], fileno: int, timeout: Optional[float] = None) -> RV: + """ + Wait for a generator using epoll where supported. + + Parameters are like for `wait()`. 
If it is detected that the best selector + strategy is `epoll` then this function will be used instead of `wait`. + + See also: https://linux.die.net/man/2/epoll_ctl + """ + try: + s = next(gen) + + if timeout is None or timeout < 0: + timeout = 0 + else: + timeout = int(timeout * 1000.0) + + with select.epoll() as epoll: + evmask = poll_evmasks[s] + epoll.register(fileno, evmask) + while True: + fileevs = None + while not fileevs: + fileevs = epoll.poll(timeout) + ev = fileevs[0][1] + ready = 0 + if ev & ~select.EPOLLOUT: + ready = READY_R + if ev & ~select.EPOLLIN: + ready |= READY_W + # assert s & ready + s = gen.send(ready) + evmask = poll_evmasks[s] + epoll.modify(fileno, evmask) + + except StopIteration as ex: + rv: RV = ex.args[0] if ex.args else None + return rv + + +if _psycopg: + wait_c = _psycopg.wait_c + + +# Choose the best wait strategy for the platform. +# +# the selectors objects have a generic interface but come with some overhead, +# so we also offer more finely tuned implementations. + +wait: WaitFunc + +# Allow the user to choose a specific function for testing +if "PSYCOPG_WAIT_FUNC" in os.environ: + fname = os.environ["PSYCOPG_WAIT_FUNC"] + if not fname.startswith("wait_") or fname not in globals(): + raise ImportError( + "PSYCOPG_WAIT_FUNC should be the name of an available wait function;" + f" got {fname!r}" + ) + wait = globals()[fname] + +elif _psycopg: + wait = wait_c + +elif selectors.DefaultSelector is getattr(selectors, "SelectSelector", None): + # On Windows, SelectSelector should be the default. 
+ wait = wait_select + +elif selectors.DefaultSelector is getattr(selectors, "EpollSelector", None): + wait = wait_epoll + +else: + wait = wait_selector diff --git a/lib/python3.11/site-packages/typing_extensions-4.7.1.dist-info/INSTALLER b/lib/python3.11/site-packages/typing_extensions-4.7.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/lib/python3.11/site-packages/typing_extensions-4.7.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/lib/python3.11/site-packages/typing_extensions-4.7.1.dist-info/LICENSE b/lib/python3.11/site-packages/typing_extensions-4.7.1.dist-info/LICENSE new file mode 100644 index 0000000..f26bcf4 --- /dev/null +++ b/lib/python3.11/site-packages/typing_extensions-4.7.1.dist-info/LICENSE @@ -0,0 +1,279 @@ +A. HISTORY OF THE SOFTWARE +========================== + +Python was created in the early 1990s by Guido van Rossum at Stichting +Mathematisch Centrum (CWI, see https://www.cwi.nl) in the Netherlands +as a successor of a language called ABC. Guido remains Python's +principal author, although it includes many contributions from others. + +In 1995, Guido continued his work on Python at the Corporation for +National Research Initiatives (CNRI, see https://www.cnri.reston.va.us) +in Reston, Virginia where he released several versions of the +software. + +In May 2000, Guido and the Python core development team moved to +BeOpen.com to form the BeOpen PythonLabs team. In October of the same +year, the PythonLabs team moved to Digital Creations, which became +Zope Corporation. In 2001, the Python Software Foundation (PSF, see +https://www.python.org/psf/) was formed, a non-profit organization +created specifically to own Python-related Intellectual Property. +Zope Corporation was a sponsoring member of the PSF. + +All Python releases are Open Source (see https://opensource.org for +the Open Source Definition). 
Historically, most, but not all, Python +releases have also been GPL-compatible; the table below summarizes +the various releases. + + Release Derived Year Owner GPL- + from compatible? (1) + + 0.9.0 thru 1.2 1991-1995 CWI yes + 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes + 1.6 1.5.2 2000 CNRI no + 2.0 1.6 2000 BeOpen.com no + 1.6.1 1.6 2001 CNRI yes (2) + 2.1 2.0+1.6.1 2001 PSF no + 2.0.1 2.0+1.6.1 2001 PSF yes + 2.1.1 2.1+2.0.1 2001 PSF yes + 2.1.2 2.1.1 2002 PSF yes + 2.1.3 2.1.2 2002 PSF yes + 2.2 and above 2.1.1 2001-now PSF yes + +Footnotes: + +(1) GPL-compatible doesn't mean that we're distributing Python under + the GPL. All Python licenses, unlike the GPL, let you distribute + a modified version without making your changes open source. The + GPL-compatible licenses make it possible to combine Python with + other software that is released under the GPL; the others don't. + +(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, + because its license has a choice of law clause. According to + CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 + is "not incompatible" with the GPL. + +Thanks to the many outside volunteers who have worked under Guido's +direction to make these releases possible. + + +B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON +=============================================================== + +Python software and documentation are licensed under the +Python Software Foundation License Version 2. + +Starting with Python 3.8.6, examples, recipes, and other code in +the documentation are dual licensed under the PSF License Version 2 +and the Zero-Clause BSD license. + +Some software incorporated into Python is under different licenses. +The licenses are listed with code falling under that license. + + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. 
This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Python Software Foundation; +All Rights Reserved" are retained in Python alone or in any derivative version +prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. 
Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 +------------------------------------------- + +BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 + +1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an +office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the +Individual or Organization ("Licensee") accessing and otherwise using +this software in source or binary form and its associated +documentation ("the Software"). + +2. Subject to the terms and conditions of this BeOpen Python License +Agreement, BeOpen hereby grants Licensee a non-exclusive, +royalty-free, world-wide license to reproduce, analyze, test, perform +and/or display publicly, prepare derivative works, distribute, and +otherwise use the Software alone or in any derivative version, +provided, however, that the BeOpen Python License is retained in the +Software, alone or in any derivative version prepared by Licensee. + +3. BeOpen is making the Software available to Licensee on an "AS IS" +basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +4. 
BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE +SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS +AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY +DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +5. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +6. This License Agreement shall be governed by and interpreted in all +respects by the law of the State of California, excluding conflict of +law provisions. Nothing in this License Agreement shall be deemed to +create any relationship of agency, partnership, or joint venture +between BeOpen and Licensee. This License Agreement does not grant +permission to use BeOpen trademarks or trade names in a trademark +sense to endorse or promote products or services of Licensee, or any +third party. As an exception, the "BeOpen Python" logos available at +http://www.pythonlabs.com/logos.html may be used according to the +permissions granted on that web page. + +7. By copying, installing or otherwise using the software, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 +--------------------------------------- + +1. This LICENSE AGREEMENT is between the Corporation for National +Research Initiatives, having an office at 1895 Preston White Drive, +Reston, VA 20191 ("CNRI"), and the Individual or Organization +("Licensee") accessing and otherwise using Python 1.6.1 software in +source or binary form and its associated documentation. + +2. 
Subject to the terms and conditions of this License Agreement, CNRI +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python 1.6.1 +alone or in any derivative version, provided, however, that CNRI's +License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) +1995-2001 Corporation for National Research Initiatives; All Rights +Reserved" are retained in Python 1.6.1 alone or in any derivative +version prepared by Licensee. Alternately, in lieu of CNRI's License +Agreement, Licensee may substitute the following text (omitting the +quotes): "Python 1.6.1 is made available subject to the terms and +conditions in CNRI's License Agreement. This Agreement together with +Python 1.6.1 may be located on the internet using the following +unique, persistent identifier (known as a handle): 1895.22/1013. This +Agreement may also be obtained from a proxy server on the internet +using the following URL: http://hdl.handle.net/1895.22/1013". + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python 1.6.1 or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python 1.6.1. + +4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" +basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. 
CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. This License Agreement shall be governed by the federal +intellectual property law of the United States, including without +limitation the federal copyright law, and, to the extent such +U.S. federal law does not apply, by the law of the Commonwealth of +Virginia, excluding Virginia's conflict of law provisions. +Notwithstanding the foregoing, with regard to derivative works based +on Python 1.6.1 that incorporate non-separable material that was +previously distributed under the GNU General Public License (GPL), the +law of the Commonwealth of Virginia shall govern this License +Agreement only as to issues arising under or with respect to +Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this +License Agreement shall be deemed to create any relationship of +agency, partnership, or joint venture between CNRI and Licensee. This +License Agreement does not grant permission to use CNRI trademarks or +trade name in a trademark sense to endorse or promote products or +services of Licensee, or any third party. + +8. By clicking on the "ACCEPT" button where indicated, or by copying, +installing or otherwise using Python 1.6.1, Licensee agrees to be +bound by the terms and conditions of this License Agreement. + + ACCEPT + + +CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 +-------------------------------------------------- + +Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, +The Netherlands. All rights reserved. 
+ +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name of Stichting Mathematisch +Centrum or CWI not be used in advertising or publicity pertaining to +distribution of the software without specific, written prior +permission. + +STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO +THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE +FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION +---------------------------------------------------------------------- + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
diff --git a/lib/python3.11/site-packages/typing_extensions-4.7.1.dist-info/METADATA b/lib/python3.11/site-packages/typing_extensions-4.7.1.dist-info/METADATA new file mode 100644 index 0000000..70e1d63 --- /dev/null +++ b/lib/python3.11/site-packages/typing_extensions-4.7.1.dist-info/METADATA @@ -0,0 +1,69 @@ +Metadata-Version: 2.1 +Name: typing_extensions +Version: 4.7.1 +Summary: Backported and Experimental Type Hints for Python 3.7+ +Keywords: annotations,backport,checker,checking,function,hinting,hints,type,typechecking,typehinting,typehints,typing +Author-email: "Guido van Rossum, Jukka Lehtosalo, Łukasz Langa, Michael Lee" +Requires-Python: >=3.7 +Description-Content-Type: text/markdown +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Python Software Foundation License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Software Development +Project-URL: Bug Tracker, https://github.com/python/typing_extensions/issues +Project-URL: Changes, https://github.com/python/typing_extensions/blob/main/CHANGELOG.md +Project-URL: Documentation, https://typing-extensions.readthedocs.io/ +Project-URL: Home, https://github.com/python/typing_extensions +Project-URL: Q & A, https://github.com/python/typing/discussions +Project-URL: Repository, https://github.com/python/typing_extensions + +# Typing Extensions + +[![Chat at 
https://gitter.im/python/typing](https://badges.gitter.im/python/typing.svg)](https://gitter.im/python/typing) + +[Documentation](https://typing-extensions.readthedocs.io/en/latest/#) – +[PyPI](https://pypi.org/project/typing-extensions/) + +## Overview + +The `typing_extensions` module serves two related purposes: + +- Enable use of new type system features on older Python versions. For example, + `typing.TypeGuard` is new in Python 3.10, but `typing_extensions` allows + users on previous Python versions to use it too. +- Enable experimentation with new type system PEPs before they are accepted and + added to the `typing` module. + +`typing_extensions` is treated specially by static type checkers such as +mypy and pyright. Objects defined in `typing_extensions` are treated the same +way as equivalent forms in `typing`. + +`typing_extensions` uses +[Semantic Versioning](https://semver.org/). The +major version will be incremented only for backwards-incompatible changes. +Therefore, it's safe to depend +on `typing_extensions` like this: `typing_extensions >=x.y, <(x+1)`, +where `x.y` is the first version that includes all features you need. + +`typing_extensions` supports Python versions 3.7 and higher. + +## Included items + +See [the documentation](https://typing-extensions.readthedocs.io/en/latest/#) for a +complete listing of module contents. + +## Contributing + +See [CONTRIBUTING.md](https://github.com/python/typing_extensions/blob/main/CONTRIBUTING.md) +for how to contribute to `typing_extensions`. 
+ diff --git a/lib/python3.11/site-packages/typing_extensions-4.7.1.dist-info/RECORD b/lib/python3.11/site-packages/typing_extensions-4.7.1.dist-info/RECORD new file mode 100644 index 0000000..ae54dd2 --- /dev/null +++ b/lib/python3.11/site-packages/typing_extensions-4.7.1.dist-info/RECORD @@ -0,0 +1,7 @@ +__pycache__/typing_extensions.cpython-311.pyc,, +typing_extensions-4.7.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +typing_extensions-4.7.1.dist-info/LICENSE,sha256=Oy-B_iHRgcSZxZolbI4ZaEVdZonSaaqFNzv7avQdo78,13936 +typing_extensions-4.7.1.dist-info/METADATA,sha256=0W71u6mC24oVYJzibNoq2l-bQnVoU_p25uiNhAq5OcA,3078 +typing_extensions-4.7.1.dist-info/RECORD,, +typing_extensions-4.7.1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 +typing_extensions.py,sha256=zkLXjhMMSmKvLLqj-MCunbScGMu7kPLZYUsLun38I00,111082 diff --git a/lib/python3.11/site-packages/typing_extensions-4.7.1.dist-info/WHEEL b/lib/python3.11/site-packages/typing_extensions-4.7.1.dist-info/WHEEL new file mode 100644 index 0000000..3b5e64b --- /dev/null +++ b/lib/python3.11/site-packages/typing_extensions-4.7.1.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.9.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/lib/python3.11/site-packages/typing_extensions.py b/lib/python3.11/site-packages/typing_extensions.py new file mode 100644 index 0000000..901f3b9 --- /dev/null +++ b/lib/python3.11/site-packages/typing_extensions.py @@ -0,0 +1,3072 @@ +import abc +import collections +import collections.abc +import functools +import inspect +import operator +import sys +import types as _types +import typing +import warnings + +__all__ = [ + # Super-special typing primitives. + 'Any', + 'ClassVar', + 'Concatenate', + 'Final', + 'LiteralString', + 'ParamSpec', + 'ParamSpecArgs', + 'ParamSpecKwargs', + 'Self', + 'Type', + 'TypeVar', + 'TypeVarTuple', + 'Unpack', + + # ABCs (from collections.abc). 
+ 'Awaitable', + 'AsyncIterator', + 'AsyncIterable', + 'Coroutine', + 'AsyncGenerator', + 'AsyncContextManager', + 'Buffer', + 'ChainMap', + + # Concrete collection types. + 'ContextManager', + 'Counter', + 'Deque', + 'DefaultDict', + 'NamedTuple', + 'OrderedDict', + 'TypedDict', + + # Structural checks, a.k.a. protocols. + 'SupportsAbs', + 'SupportsBytes', + 'SupportsComplex', + 'SupportsFloat', + 'SupportsIndex', + 'SupportsInt', + 'SupportsRound', + + # One-off things. + 'Annotated', + 'assert_never', + 'assert_type', + 'clear_overloads', + 'dataclass_transform', + 'deprecated', + 'get_overloads', + 'final', + 'get_args', + 'get_origin', + 'get_original_bases', + 'get_protocol_members', + 'get_type_hints', + 'IntVar', + 'is_protocol', + 'is_typeddict', + 'Literal', + 'NewType', + 'overload', + 'override', + 'Protocol', + 'reveal_type', + 'runtime', + 'runtime_checkable', + 'Text', + 'TypeAlias', + 'TypeAliasType', + 'TypeGuard', + 'TYPE_CHECKING', + 'Never', + 'NoReturn', + 'Required', + 'NotRequired', + + # Pure aliases, have always been in typing + 'AbstractSet', + 'AnyStr', + 'BinaryIO', + 'Callable', + 'Collection', + 'Container', + 'Dict', + 'ForwardRef', + 'FrozenSet', + 'Generator', + 'Generic', + 'Hashable', + 'IO', + 'ItemsView', + 'Iterable', + 'Iterator', + 'KeysView', + 'List', + 'Mapping', + 'MappingView', + 'Match', + 'MutableMapping', + 'MutableSequence', + 'MutableSet', + 'Optional', + 'Pattern', + 'Reversible', + 'Sequence', + 'Set', + 'Sized', + 'TextIO', + 'Tuple', + 'Union', + 'ValuesView', + 'cast', + 'no_type_check', + 'no_type_check_decorator', +] + +# for backward compatibility +PEP_560 = True +GenericMeta = type + +# The functions below are modified copies of typing internal helpers. +# They are needed by _ProtocolMeta and they provide support for PEP 646. 
+ + +class _Sentinel: + def __repr__(self): + return "" + + +_marker = _Sentinel() + + +def _check_generic(cls, parameters, elen=_marker): + """Check correct count for parameters of a generic cls (internal helper). + This gives a nice error message in case of count mismatch. + """ + if not elen: + raise TypeError(f"{cls} is not a generic class") + if elen is _marker: + if not hasattr(cls, "__parameters__") or not cls.__parameters__: + raise TypeError(f"{cls} is not a generic class") + elen = len(cls.__parameters__) + alen = len(parameters) + if alen != elen: + if hasattr(cls, "__parameters__"): + parameters = [p for p in cls.__parameters__ if not _is_unpack(p)] + num_tv_tuples = sum(isinstance(p, TypeVarTuple) for p in parameters) + if (num_tv_tuples > 0) and (alen >= elen - num_tv_tuples): + return + raise TypeError(f"Too {'many' if alen > elen else 'few'} parameters for {cls};" + f" actual {alen}, expected {elen}") + + +if sys.version_info >= (3, 10): + def _should_collect_from_parameters(t): + return isinstance( + t, (typing._GenericAlias, _types.GenericAlias, _types.UnionType) + ) +elif sys.version_info >= (3, 9): + def _should_collect_from_parameters(t): + return isinstance(t, (typing._GenericAlias, _types.GenericAlias)) +else: + def _should_collect_from_parameters(t): + return isinstance(t, typing._GenericAlias) and not t._special + + +def _collect_type_vars(types, typevar_types=None): + """Collect all type variable contained in types in order of + first appearance (lexicographic order). For example:: + + _collect_type_vars((T, List[S, T])) == (T, S) + """ + if typevar_types is None: + typevar_types = typing.TypeVar + tvars = [] + for t in types: + if ( + isinstance(t, typevar_types) and + t not in tvars and + not _is_unpack(t) + ): + tvars.append(t) + if _should_collect_from_parameters(t): + tvars.extend([t for t in t.__parameters__ if t not in tvars]) + return tuple(tvars) + + +NoReturn = typing.NoReturn + +# Some unconstrained type variables. 
These are used by the container types. +# (These are not for export.) +T = typing.TypeVar('T') # Any type. +KT = typing.TypeVar('KT') # Key type. +VT = typing.TypeVar('VT') # Value type. +T_co = typing.TypeVar('T_co', covariant=True) # Any type covariant containers. +T_contra = typing.TypeVar('T_contra', contravariant=True) # Ditto contravariant. + + +if sys.version_info >= (3, 11): + from typing import Any +else: + + class _AnyMeta(type): + def __instancecheck__(self, obj): + if self is Any: + raise TypeError("typing_extensions.Any cannot be used with isinstance()") + return super().__instancecheck__(obj) + + def __repr__(self): + if self is Any: + return "typing_extensions.Any" + return super().__repr__() + + class Any(metaclass=_AnyMeta): + """Special type indicating an unconstrained type. + - Any is compatible with every type. + - Any assumed to have all methods. + - All values assumed to be instances of Any. + Note that all the above statements are true from the point of view of + static type checkers. At runtime, Any should not be used with instance + checks. + """ + def __new__(cls, *args, **kwargs): + if cls is Any: + raise TypeError("Any cannot be instantiated") + return super().__new__(cls, *args, **kwargs) + + +ClassVar = typing.ClassVar + + +class _ExtensionsSpecialForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + +# On older versions of typing there is an internal class named "Final". +# 3.8+ +if hasattr(typing, 'Final') and sys.version_info[:2] >= (3, 7): + Final = typing.Final +# 3.7 +else: + class _FinalForm(_ExtensionsSpecialForm, _root=True): + def __getitem__(self, parameters): + item = typing._type_check(parameters, + f'{self._name} accepts only a single type.') + return typing._GenericAlias(self, (item,)) + + Final = _FinalForm('Final', + doc="""A special typing construct to indicate that a name + cannot be re-assigned or overridden in a subclass. 
+ For example: + + MAX_SIZE: Final = 9000 + MAX_SIZE += 1 # Error reported by type checker + + class Connection: + TIMEOUT: Final[int] = 10 + class FastConnector(Connection): + TIMEOUT = 1 # Error reported by type checker + + There is no runtime checking of these properties.""") + +if sys.version_info >= (3, 11): + final = typing.final +else: + # @final exists in 3.8+, but we backport it for all versions + # before 3.11 to keep support for the __final__ attribute. + # See https://bugs.python.org/issue46342 + def final(f): + """This decorator can be used to indicate to type checkers that + the decorated method cannot be overridden, and decorated class + cannot be subclassed. For example: + + class Base: + @final + def done(self) -> None: + ... + class Sub(Base): + def done(self) -> None: # Error reported by type checker + ... + @final + class Leaf: + ... + class Other(Leaf): # Error reported by type checker + ... + + There is no runtime checking of these properties. The decorator + sets the ``__final__`` attribute to ``True`` on the decorated object + to allow runtime introspection. + """ + try: + f.__final__ = True + except (AttributeError, TypeError): + # Skip the attribute silently if it is not writable. + # AttributeError happens if the object has __slots__ or a + # read-only property, TypeError if it's a builtin class. 
+ pass + return f + + +def IntVar(name): + return typing.TypeVar(name) + + +# A Literal bug was fixed in 3.11.0, 3.10.1 and 3.9.8 +if sys.version_info >= (3, 10, 1): + Literal = typing.Literal +else: + def _flatten_literal_params(parameters): + """An internal helper for Literal creation: flatten Literals among parameters""" + params = [] + for p in parameters: + if isinstance(p, _LiteralGenericAlias): + params.extend(p.__args__) + else: + params.append(p) + return tuple(params) + + def _value_and_type_iter(params): + for p in params: + yield p, type(p) + + class _LiteralGenericAlias(typing._GenericAlias, _root=True): + def __eq__(self, other): + if not isinstance(other, _LiteralGenericAlias): + return NotImplemented + these_args_deduped = set(_value_and_type_iter(self.__args__)) + other_args_deduped = set(_value_and_type_iter(other.__args__)) + return these_args_deduped == other_args_deduped + + def __hash__(self): + return hash(frozenset(_value_and_type_iter(self.__args__))) + + class _LiteralForm(_ExtensionsSpecialForm, _root=True): + def __init__(self, doc: str): + self._name = 'Literal' + self._doc = self.__doc__ = doc + + def __getitem__(self, parameters): + if not isinstance(parameters, tuple): + parameters = (parameters,) + + parameters = _flatten_literal_params(parameters) + + val_type_pairs = list(_value_and_type_iter(parameters)) + try: + deduped_pairs = set(val_type_pairs) + except TypeError: + # unhashable parameters + pass + else: + # similar logic to typing._deduplicate on Python 3.9+ + if len(deduped_pairs) < len(val_type_pairs): + new_parameters = [] + for pair in val_type_pairs: + if pair in deduped_pairs: + new_parameters.append(pair[0]) + deduped_pairs.remove(pair) + assert not deduped_pairs, deduped_pairs + parameters = tuple(new_parameters) + + return _LiteralGenericAlias(self, parameters) + + Literal = _LiteralForm(doc="""\ + A type that can be used to indicate to type checkers + that the corresponding value has a value literally equivalent + 
to the provided parameter. For example: + + var: Literal[4] = 4 + + The type checker understands that 'var' is literally equal to + the value 4 and no other value. + + Literal[...] cannot be subclassed. There is no runtime + checking verifying that the parameter is actually a value + instead of a type.""") + + +_overload_dummy = typing._overload_dummy + + +if hasattr(typing, "get_overloads"): # 3.11+ + overload = typing.overload + get_overloads = typing.get_overloads + clear_overloads = typing.clear_overloads +else: + # {module: {qualname: {firstlineno: func}}} + _overload_registry = collections.defaultdict( + functools.partial(collections.defaultdict, dict) + ) + + def overload(func): + """Decorator for overloaded functions/methods. + + In a stub file, place two or more stub definitions for the same + function in a row, each decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + + In a non-stub file (i.e. a regular .py file), do the same but + follow it with an implementation. The implementation should *not* + be decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + def utf8(value): + # implementation goes here + + The overloads for a function can be retrieved at runtime using the + get_overloads() function. + """ + # classmethod and staticmethod + f = getattr(func, "__func__", func) + try: + _overload_registry[f.__module__][f.__qualname__][ + f.__code__.co_firstlineno + ] = func + except AttributeError: + # Not a normal function; ignore. 
+ pass + return _overload_dummy + + def get_overloads(func): + """Return all defined overloads for *func* as a sequence.""" + # classmethod and staticmethod + f = getattr(func, "__func__", func) + if f.__module__ not in _overload_registry: + return [] + mod_dict = _overload_registry[f.__module__] + if f.__qualname__ not in mod_dict: + return [] + return list(mod_dict[f.__qualname__].values()) + + def clear_overloads(): + """Clear all overloads in the registry.""" + _overload_registry.clear() + + +# This is not a real generic class. Don't use outside annotations. +Type = typing.Type + +# Various ABCs mimicking those in collections.abc. +# A few are simply re-exported for completeness. + + +Awaitable = typing.Awaitable +Coroutine = typing.Coroutine +AsyncIterable = typing.AsyncIterable +AsyncIterator = typing.AsyncIterator +Deque = typing.Deque +ContextManager = typing.ContextManager +AsyncContextManager = typing.AsyncContextManager +DefaultDict = typing.DefaultDict + +# 3.7.2+ +if hasattr(typing, 'OrderedDict'): + OrderedDict = typing.OrderedDict +# 3.7.0-3.7.2 +else: + OrderedDict = typing._alias(collections.OrderedDict, (KT, VT)) + +Counter = typing.Counter +ChainMap = typing.ChainMap +AsyncGenerator = typing.AsyncGenerator +Text = typing.Text +TYPE_CHECKING = typing.TYPE_CHECKING + + +_PROTO_ALLOWLIST = { + 'collections.abc': [ + 'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable', + 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', 'Buffer', + ], + 'contextlib': ['AbstractContextManager', 'AbstractAsyncContextManager'], + 'typing_extensions': ['Buffer'], +} + + +_EXCLUDED_ATTRS = { + "__abstractmethods__", "__annotations__", "__weakref__", "_is_protocol", + "_is_runtime_protocol", "__dict__", "__slots__", "__parameters__", + "__orig_bases__", "__module__", "_MutableMapping__marker", "__doc__", + "__subclasshook__", "__orig_class__", "__init__", "__new__", + "__protocol_attrs__", "__callable_proto_members_only__", +} + +if 
sys.version_info < (3, 8): + _EXCLUDED_ATTRS |= { + "_gorg", "__next_in_mro__", "__extra__", "__tree_hash__", "__args__", + "__origin__" + } + +if sys.version_info >= (3, 9): + _EXCLUDED_ATTRS.add("__class_getitem__") + +if sys.version_info >= (3, 12): + _EXCLUDED_ATTRS.add("__type_params__") + +_EXCLUDED_ATTRS = frozenset(_EXCLUDED_ATTRS) + + +def _get_protocol_attrs(cls): + attrs = set() + for base in cls.__mro__[:-1]: # without object + if base.__name__ in {'Protocol', 'Generic'}: + continue + annotations = getattr(base, '__annotations__', {}) + for attr in (*base.__dict__, *annotations): + if (not attr.startswith('_abc_') and attr not in _EXCLUDED_ATTRS): + attrs.add(attr) + return attrs + + +def _maybe_adjust_parameters(cls): + """Helper function used in Protocol.__init_subclass__ and _TypedDictMeta.__new__. + + The contents of this function are very similar + to logic found in typing.Generic.__init_subclass__ + on the CPython main branch. + """ + tvars = [] + if '__orig_bases__' in cls.__dict__: + tvars = _collect_type_vars(cls.__orig_bases__) + # Look for Generic[T1, ..., Tn] or Protocol[T1, ..., Tn]. + # If found, tvars must be a subset of it. + # If not found, tvars is it. + # Also check for and reject plain Generic, + # and reject multiple Generic[...] and/or Protocol[...]. + gvars = None + for base in cls.__orig_bases__: + if (isinstance(base, typing._GenericAlias) and + base.__origin__ in (typing.Generic, Protocol)): + # for error messages + the_base = base.__origin__.__name__ + if gvars is not None: + raise TypeError( + "Cannot inherit from Generic[...]" + " and/or Protocol[...] 
multiple types.") + gvars = base.__parameters__ + if gvars is None: + gvars = tvars + else: + tvarset = set(tvars) + gvarset = set(gvars) + if not tvarset <= gvarset: + s_vars = ', '.join(str(t) for t in tvars if t not in gvarset) + s_args = ', '.join(str(g) for g in gvars) + raise TypeError(f"Some type variables ({s_vars}) are" + f" not listed in {the_base}[{s_args}]") + tvars = gvars + cls.__parameters__ = tuple(tvars) + + +def _caller(depth=2): + try: + return sys._getframe(depth).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): # For platforms without _getframe() + return None + + +# The performance of runtime-checkable protocols is significantly improved on Python 3.12, +# so we backport the 3.12 version of Protocol to Python <=3.11 +if sys.version_info >= (3, 12): + Protocol = typing.Protocol +else: + def _allow_reckless_class_checks(depth=3): + """Allow instance and class checks for special stdlib modules. + The abc and functools modules indiscriminately call isinstance() and + issubclass() on the whole MRO of a user class, which may contain protocols. + """ + return _caller(depth) in {'abc', 'functools', None} + + def _no_init(self, *args, **kwargs): + if type(self)._is_protocol: + raise TypeError('Protocols cannot be instantiated') + + if sys.version_info >= (3, 8): + # Inheriting from typing._ProtocolMeta isn't actually desirable, + # but is necessary to allow typing.Protocol and typing_extensions.Protocol + # to mix without getting TypeErrors about "metaclass conflict" + _typing_Protocol = typing.Protocol + _ProtocolMetaBase = type(_typing_Protocol) + else: + _typing_Protocol = _marker + _ProtocolMetaBase = abc.ABCMeta + + class _ProtocolMeta(_ProtocolMetaBase): + # This metaclass is somewhat unfortunate, + # but is necessary for several reasons... 
+ # + # NOTE: DO NOT call super() in any methods in this class + # That would call the methods on typing._ProtocolMeta on Python 3.8-3.11 + # and those are slow + def __new__(mcls, name, bases, namespace, **kwargs): + if name == "Protocol" and len(bases) < 2: + pass + elif {Protocol, _typing_Protocol} & set(bases): + for base in bases: + if not ( + base in {object, typing.Generic, Protocol, _typing_Protocol} + or base.__name__ in _PROTO_ALLOWLIST.get(base.__module__, []) + or is_protocol(base) + ): + raise TypeError( + f"Protocols can only inherit from other protocols, " + f"got {base!r}" + ) + return abc.ABCMeta.__new__(mcls, name, bases, namespace, **kwargs) + + def __init__(cls, *args, **kwargs): + abc.ABCMeta.__init__(cls, *args, **kwargs) + if getattr(cls, "_is_protocol", False): + cls.__protocol_attrs__ = _get_protocol_attrs(cls) + # PEP 544 prohibits using issubclass() + # with protocols that have non-method members. + cls.__callable_proto_members_only__ = all( + callable(getattr(cls, attr, None)) for attr in cls.__protocol_attrs__ + ) + + def __subclasscheck__(cls, other): + if cls is Protocol: + return type.__subclasscheck__(cls, other) + if ( + getattr(cls, '_is_protocol', False) + and not _allow_reckless_class_checks() + ): + if not isinstance(other, type): + # Same error message as for issubclass(1, int). + raise TypeError('issubclass() arg 1 must be a class') + if ( + not cls.__callable_proto_members_only__ + and cls.__dict__.get("__subclasshook__") is _proto_hook + ): + raise TypeError( + "Protocols with non-method members don't support issubclass()" + ) + if not getattr(cls, '_is_runtime_protocol', False): + raise TypeError( + "Instance and class checks can only be used with " + "@runtime_checkable protocols" + ) + return abc.ABCMeta.__subclasscheck__(cls, other) + + def __instancecheck__(cls, instance): + # We need this method for situations where attributes are + # assigned in __init__. 
+ if cls is Protocol: + return type.__instancecheck__(cls, instance) + if not getattr(cls, "_is_protocol", False): + # i.e., it's a concrete subclass of a protocol + return abc.ABCMeta.__instancecheck__(cls, instance) + + if ( + not getattr(cls, '_is_runtime_protocol', False) and + not _allow_reckless_class_checks() + ): + raise TypeError("Instance and class checks can only be used with" + " @runtime_checkable protocols") + + if abc.ABCMeta.__instancecheck__(cls, instance): + return True + + for attr in cls.__protocol_attrs__: + try: + val = inspect.getattr_static(instance, attr) + except AttributeError: + break + if val is None and callable(getattr(cls, attr, None)): + break + else: + return True + + return False + + def __eq__(cls, other): + # Hack so that typing.Generic.__class_getitem__ + # treats typing_extensions.Protocol + # as equivalent to typing.Protocol on Python 3.8+ + if abc.ABCMeta.__eq__(cls, other) is True: + return True + return ( + cls is Protocol and other is getattr(typing, "Protocol", object()) + ) + + # This has to be defined, or the abc-module cache + # complains about classes with this metaclass being unhashable, + # if we define only __eq__! + def __hash__(cls) -> int: + return type.__hash__(cls) + + @classmethod + def _proto_hook(cls, other): + if not cls.__dict__.get('_is_protocol', False): + return NotImplemented + + for attr in cls.__protocol_attrs__: + for base in other.__mro__: + # Check if the members appears in the class dictionary... + if attr in base.__dict__: + if base.__dict__[attr] is None: + return NotImplemented + break + + # ...or in annotations, if it is a sub-protocol. 
+ annotations = getattr(base, '__annotations__', {}) + if ( + isinstance(annotations, collections.abc.Mapping) + and attr in annotations + and is_protocol(other) + ): + break + else: + return NotImplemented + return True + + if sys.version_info >= (3, 8): + class Protocol(typing.Generic, metaclass=_ProtocolMeta): + __doc__ = typing.Protocol.__doc__ + __slots__ = () + _is_protocol = True + _is_runtime_protocol = False + + def __init_subclass__(cls, *args, **kwargs): + super().__init_subclass__(*args, **kwargs) + + # Determine if this is a protocol or a concrete subclass. + if not cls.__dict__.get('_is_protocol', False): + cls._is_protocol = any(b is Protocol for b in cls.__bases__) + + # Set (or override) the protocol subclass hook. + if '__subclasshook__' not in cls.__dict__: + cls.__subclasshook__ = _proto_hook + + # Prohibit instantiation for protocol classes + if cls._is_protocol and cls.__init__ is Protocol.__init__: + cls.__init__ = _no_init + + else: + class Protocol(metaclass=_ProtocolMeta): + # There is quite a lot of overlapping code with typing.Generic. + # Unfortunately it is hard to avoid this on Python <3.8, + # as the typing module on Python 3.7 doesn't let us subclass typing.Generic! + """Base class for protocol classes. Protocol classes are defined as:: + + class Proto(Protocol): + def meth(self) -> int: + ... + + Such classes are primarily used with static type checkers that recognize + structural subtyping (static duck-typing), for example:: + + class C: + def meth(self) -> int: + return 0 + + def func(x: Proto) -> int: + return x.meth() + + func(C()) # Passes static type check + + See PEP 544 for details. Protocol classes decorated with + @typing_extensions.runtime_checkable act + as simple-minded runtime-checkable protocols that check + only the presence of given attributes, ignoring their type signatures. + + Protocol classes can be generic, they are defined as:: + + class GenProto(Protocol[T]): + def meth(self) -> T: + ... 
+ """ + __slots__ = () + _is_protocol = True + _is_runtime_protocol = False + + def __new__(cls, *args, **kwds): + if cls is Protocol: + raise TypeError("Type Protocol cannot be instantiated; " + "it can only be used as a base class") + return super().__new__(cls) + + @typing._tp_cache + def __class_getitem__(cls, params): + if not isinstance(params, tuple): + params = (params,) + if not params and cls is not typing.Tuple: + raise TypeError( + f"Parameter list to {cls.__qualname__}[...] cannot be empty") + msg = "Parameters to generic types must be types." + params = tuple(typing._type_check(p, msg) for p in params) + if cls is Protocol: + # Generic can only be subscripted with unique type variables. + if not all(isinstance(p, typing.TypeVar) for p in params): + i = 0 + while isinstance(params[i], typing.TypeVar): + i += 1 + raise TypeError( + "Parameters to Protocol[...] must all be type variables." + f" Parameter {i + 1} is {params[i]}") + if len(set(params)) != len(params): + raise TypeError( + "Parameters to Protocol[...] must all be unique") + else: + # Subscripting a regular Generic subclass. + _check_generic(cls, params, len(cls.__parameters__)) + return typing._GenericAlias(cls, params) + + def __init_subclass__(cls, *args, **kwargs): + if '__orig_bases__' in cls.__dict__: + error = typing.Generic in cls.__orig_bases__ + else: + error = typing.Generic in cls.__bases__ + if error: + raise TypeError("Cannot inherit from plain Generic") + _maybe_adjust_parameters(cls) + + # Determine if this is a protocol or a concrete subclass. + if not cls.__dict__.get('_is_protocol', None): + cls._is_protocol = any(b is Protocol for b in cls.__bases__) + + # Set (or override) the protocol subclass hook. 
+ if '__subclasshook__' not in cls.__dict__: + cls.__subclasshook__ = _proto_hook + + # Prohibit instantiation for protocol classes + if cls._is_protocol and cls.__init__ is Protocol.__init__: + cls.__init__ = _no_init + + +if sys.version_info >= (3, 8): + runtime_checkable = typing.runtime_checkable +else: + def runtime_checkable(cls): + """Mark a protocol class as a runtime protocol, so that it + can be used with isinstance() and issubclass(). Raise TypeError + if applied to a non-protocol class. + + This allows a simple-minded structural check very similar to the + one-offs in collections.abc such as Hashable. + """ + if not ( + (isinstance(cls, _ProtocolMeta) or issubclass(cls, typing.Generic)) + and getattr(cls, "_is_protocol", False) + ): + raise TypeError('@runtime_checkable can be only applied to protocol classes,' + f' got {cls!r}') + cls._is_runtime_protocol = True + return cls + + +# Exists for backwards compatibility. +runtime = runtime_checkable + + +# Our version of runtime-checkable protocols is faster on Python 3.7-3.11 +if sys.version_info >= (3, 12): + SupportsInt = typing.SupportsInt + SupportsFloat = typing.SupportsFloat + SupportsComplex = typing.SupportsComplex + SupportsBytes = typing.SupportsBytes + SupportsIndex = typing.SupportsIndex + SupportsAbs = typing.SupportsAbs + SupportsRound = typing.SupportsRound +else: + @runtime_checkable + class SupportsInt(Protocol): + """An ABC with one abstract method __int__.""" + __slots__ = () + + @abc.abstractmethod + def __int__(self) -> int: + pass + + @runtime_checkable + class SupportsFloat(Protocol): + """An ABC with one abstract method __float__.""" + __slots__ = () + + @abc.abstractmethod + def __float__(self) -> float: + pass + + @runtime_checkable + class SupportsComplex(Protocol): + """An ABC with one abstract method __complex__.""" + __slots__ = () + + @abc.abstractmethod + def __complex__(self) -> complex: + pass + + @runtime_checkable + class SupportsBytes(Protocol): + """An ABC with one 
abstract method __bytes__.""" + __slots__ = () + + @abc.abstractmethod + def __bytes__(self) -> bytes: + pass + + @runtime_checkable + class SupportsIndex(Protocol): + __slots__ = () + + @abc.abstractmethod + def __index__(self) -> int: + pass + + @runtime_checkable + class SupportsAbs(Protocol[T_co]): + """ + An ABC with one abstract method __abs__ that is covariant in its return type. + """ + __slots__ = () + + @abc.abstractmethod + def __abs__(self) -> T_co: + pass + + @runtime_checkable + class SupportsRound(Protocol[T_co]): + """ + An ABC with one abstract method __round__ that is covariant in its return type. + """ + __slots__ = () + + @abc.abstractmethod + def __round__(self, ndigits: int = 0) -> T_co: + pass + + +def _ensure_subclassable(mro_entries): + def inner(func): + if sys.implementation.name == "pypy" and sys.version_info < (3, 9): + cls_dict = { + "__call__": staticmethod(func), + "__mro_entries__": staticmethod(mro_entries) + } + t = type(func.__name__, (), cls_dict) + return functools.update_wrapper(t(), func) + else: + func.__mro_entries__ = mro_entries + return func + return inner + + +if sys.version_info >= (3, 13): + # The standard library TypedDict in Python 3.8 does not store runtime information + # about which (if any) keys are optional. See https://bugs.python.org/issue38834 + # The standard library TypedDict in Python 3.9.0/1 does not honour the "total" + # keyword with old-style TypedDict(). See https://bugs.python.org/issue42059 + # The standard library TypedDict below Python 3.11 does not store runtime + # information about optional and required keys when using Required or NotRequired. + # Generic TypedDicts are also impossible using typing.TypedDict on Python <3.11. + # Aaaand on 3.12 we add __orig_bases__ to TypedDict + # to enable better runtime introspection. + # On 3.13 we deprecate some odd ways of creating TypedDicts. 
+ TypedDict = typing.TypedDict + _TypedDictMeta = typing._TypedDictMeta + is_typeddict = typing.is_typeddict +else: + # 3.10.0 and later + _TAKES_MODULE = "module" in inspect.signature(typing._type_check).parameters + + if sys.version_info >= (3, 8): + _fake_name = "Protocol" + else: + _fake_name = "_Protocol" + + class _TypedDictMeta(type): + def __new__(cls, name, bases, ns, total=True): + """Create new typed dict class object. + + This method is called when TypedDict is subclassed, + or when TypedDict is instantiated. This way + TypedDict supports all three syntax forms described in its docstring. + Subclasses and instances of TypedDict return actual dictionaries. + """ + for base in bases: + if type(base) is not _TypedDictMeta and base is not typing.Generic: + raise TypeError('cannot inherit from both a TypedDict type ' + 'and a non-TypedDict base class') + + if any(issubclass(b, typing.Generic) for b in bases): + generic_base = (typing.Generic,) + else: + generic_base = () + + # typing.py generally doesn't let you inherit from plain Generic, unless + # the name of the class happens to be "Protocol" (or "_Protocol" on 3.7). 
+ tp_dict = type.__new__(_TypedDictMeta, _fake_name, (*generic_base, dict), ns) + tp_dict.__name__ = name + if tp_dict.__qualname__ == _fake_name: + tp_dict.__qualname__ = name + + if not hasattr(tp_dict, '__orig_bases__'): + tp_dict.__orig_bases__ = bases + + annotations = {} + own_annotations = ns.get('__annotations__', {}) + msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type" + if _TAKES_MODULE: + own_annotations = { + n: typing._type_check(tp, msg, module=tp_dict.__module__) + for n, tp in own_annotations.items() + } + else: + own_annotations = { + n: typing._type_check(tp, msg) + for n, tp in own_annotations.items() + } + required_keys = set() + optional_keys = set() + + for base in bases: + annotations.update(base.__dict__.get('__annotations__', {})) + required_keys.update(base.__dict__.get('__required_keys__', ())) + optional_keys.update(base.__dict__.get('__optional_keys__', ())) + + annotations.update(own_annotations) + for annotation_key, annotation_type in own_annotations.items(): + annotation_origin = get_origin(annotation_type) + if annotation_origin is Annotated: + annotation_args = get_args(annotation_type) + if annotation_args: + annotation_type = annotation_args[0] + annotation_origin = get_origin(annotation_type) + + if annotation_origin is Required: + required_keys.add(annotation_key) + elif annotation_origin is NotRequired: + optional_keys.add(annotation_key) + elif total: + required_keys.add(annotation_key) + else: + optional_keys.add(annotation_key) + + tp_dict.__annotations__ = annotations + tp_dict.__required_keys__ = frozenset(required_keys) + tp_dict.__optional_keys__ = frozenset(optional_keys) + if not hasattr(tp_dict, '__total__'): + tp_dict.__total__ = total + return tp_dict + + __call__ = dict # static method + + def __subclasscheck__(cls, other): + # Typed dicts are only for static structural subtyping. 
+ raise TypeError('TypedDict does not support instance and class checks') + + __instancecheck__ = __subclasscheck__ + + _TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {}) + + @_ensure_subclassable(lambda bases: (_TypedDict,)) + def TypedDict(__typename, __fields=_marker, *, total=True, **kwargs): + """A simple typed namespace. At runtime it is equivalent to a plain dict. + + TypedDict creates a dictionary type such that a type checker will expect all + instances to have a certain set of keys, where each key is + associated with a value of a consistent type. This expectation + is not checked at runtime. + + Usage:: + + class Point2D(TypedDict): + x: int + y: int + label: str + + a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK + b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check + + assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first') + + The type info can be accessed via the Point2D.__annotations__ dict, and + the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets. + TypedDict supports an additional equivalent form:: + + Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str}) + + By default, all keys must be present in a TypedDict. It is possible + to override this by specifying totality:: + + class Point2D(TypedDict, total=False): + x: int + y: int + + This means that a Point2D TypedDict can have any of the keys omitted. A type + checker is only expected to support a literal False or True as the value of + the total argument. True is the default, and makes all items defined in the + class body be required. + + The Required and NotRequired special forms can also be used to mark + individual keys as being required or not required:: + + class Point2D(TypedDict): + x: int # the "x" key must always be present (Required is the default) + y: NotRequired[int] # the "y" key can be omitted + + See PEP 655 for more details on Required and NotRequired. 
+ """ + if __fields is _marker or __fields is None: + if __fields is _marker: + deprecated_thing = "Failing to pass a value for the 'fields' parameter" + else: + deprecated_thing = "Passing `None` as the 'fields' parameter" + + example = f"`{__typename} = TypedDict({__typename!r}, {{}})`" + deprecation_msg = ( + f"{deprecated_thing} is deprecated and will be disallowed in " + "Python 3.15. To create a TypedDict class with 0 fields " + "using the functional syntax, pass an empty dictionary, e.g. " + ) + example + "." + warnings.warn(deprecation_msg, DeprecationWarning, stacklevel=2) + __fields = kwargs + elif kwargs: + raise TypeError("TypedDict takes either a dict or keyword arguments," + " but not both") + if kwargs: + warnings.warn( + "The kwargs-based syntax for TypedDict definitions is deprecated " + "in Python 3.11, will be removed in Python 3.13, and may not be " + "understood by third-party type checkers.", + DeprecationWarning, + stacklevel=2, + ) + + ns = {'__annotations__': dict(__fields)} + module = _caller() + if module is not None: + # Setting correct module is necessary to make typed dict classes pickleable. + ns['__module__'] = module + + td = _TypedDictMeta(__typename, (), ns, total=total) + td.__orig_bases__ = (TypedDict,) + return td + + if hasattr(typing, "_TypedDictMeta"): + _TYPEDDICT_TYPES = (typing._TypedDictMeta, _TypedDictMeta) + else: + _TYPEDDICT_TYPES = (_TypedDictMeta,) + + def is_typeddict(tp): + """Check if an annotation is a TypedDict class + + For example:: + class Film(TypedDict): + title: str + year: int + + is_typeddict(Film) # => True + is_typeddict(Union[list, str]) # => False + """ + # On 3.8, this would otherwise return True + if hasattr(typing, "TypedDict") and tp is typing.TypedDict: + return False + return isinstance(tp, _TYPEDDICT_TYPES) + + +if hasattr(typing, "assert_type"): + assert_type = typing.assert_type + +else: + def assert_type(__val, __typ): + """Assert (to the type checker) that the value is of the given type. 
+ + When the type checker encounters a call to assert_type(), it + emits an error if the value is not of the specified type:: + + def greet(name: str) -> None: + assert_type(name, str) # ok + assert_type(name, int) # type checker error + + At runtime this returns the first argument unchanged and otherwise + does nothing. + """ + return __val + + +if hasattr(typing, "Required"): + get_type_hints = typing.get_type_hints +else: + # replaces _strip_annotations() + def _strip_extras(t): + """Strips Annotated, Required and NotRequired from a given type.""" + if isinstance(t, _AnnotatedAlias): + return _strip_extras(t.__origin__) + if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired): + return _strip_extras(t.__args__[0]) + if isinstance(t, typing._GenericAlias): + stripped_args = tuple(_strip_extras(a) for a in t.__args__) + if stripped_args == t.__args__: + return t + return t.copy_with(stripped_args) + if hasattr(_types, "GenericAlias") and isinstance(t, _types.GenericAlias): + stripped_args = tuple(_strip_extras(a) for a in t.__args__) + if stripped_args == t.__args__: + return t + return _types.GenericAlias(t.__origin__, stripped_args) + if hasattr(_types, "UnionType") and isinstance(t, _types.UnionType): + stripped_args = tuple(_strip_extras(a) for a in t.__args__) + if stripped_args == t.__args__: + return t + return functools.reduce(operator.or_, stripped_args) + + return t + + def get_type_hints(obj, globalns=None, localns=None, include_extras=False): + """Return type hints for an object. + + This is often the same as obj.__annotations__, but it handles + forward references encoded as string literals, adds Optional[t] if a + default value equal to None is set and recursively replaces all + 'Annotated[T, ...]', 'Required[T]' or 'NotRequired[T]' with 'T' + (unless 'include_extras=True'). + + The argument may be a module, class, method, or function. The annotations + are returned as a dictionary. 
For classes, annotations include also + inherited members. + + TypeError is raised if the argument is not of a type that can contain + annotations, and an empty dictionary is returned if no annotations are + present. + + BEWARE -- the behavior of globalns and localns is counterintuitive + (unless you are familiar with how eval() and exec() work). The + search order is locals first, then globals. + + - If no dict arguments are passed, an attempt is made to use the + globals from obj (or the respective module's globals for classes), + and these are also used as the locals. If the object does not appear + to have globals, an empty dictionary is used. + + - If one dict argument is passed, it is used for both globals and + locals. + + - If two dict arguments are passed, they specify globals and + locals, respectively. + """ + if hasattr(typing, "Annotated"): + hint = typing.get_type_hints( + obj, globalns=globalns, localns=localns, include_extras=True + ) + else: + hint = typing.get_type_hints(obj, globalns=globalns, localns=localns) + if include_extras: + return hint + return {k: _strip_extras(t) for k, t in hint.items()} + + +# Python 3.9+ has PEP 593 (Annotated) +if hasattr(typing, 'Annotated'): + Annotated = typing.Annotated + # Not exported and not a public API, but needed for get_origin() and get_args() + # to work. + _AnnotatedAlias = typing._AnnotatedAlias +# 3.7-3.8 +else: + class _AnnotatedAlias(typing._GenericAlias, _root=True): + """Runtime representation of an annotated type. + + At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't' + with extra annotations. The alias behaves like a normal typing alias, + instantiating is the same as instantiating the underlying type, binding + it to types is also the same. 
+ """ + def __init__(self, origin, metadata): + if isinstance(origin, _AnnotatedAlias): + metadata = origin.__metadata__ + metadata + origin = origin.__origin__ + super().__init__(origin, origin) + self.__metadata__ = metadata + + def copy_with(self, params): + assert len(params) == 1 + new_type = params[0] + return _AnnotatedAlias(new_type, self.__metadata__) + + def __repr__(self): + return (f"typing_extensions.Annotated[{typing._type_repr(self.__origin__)}, " + f"{', '.join(repr(a) for a in self.__metadata__)}]") + + def __reduce__(self): + return operator.getitem, ( + Annotated, (self.__origin__,) + self.__metadata__ + ) + + def __eq__(self, other): + if not isinstance(other, _AnnotatedAlias): + return NotImplemented + if self.__origin__ != other.__origin__: + return False + return self.__metadata__ == other.__metadata__ + + def __hash__(self): + return hash((self.__origin__, self.__metadata__)) + + class Annotated: + """Add context specific metadata to a type. + + Example: Annotated[int, runtime_check.Unsigned] indicates to the + hypothetical runtime_check module that this type is an unsigned int. + Every other consumer of this type can ignore this metadata and treat + this type as int. + + The first argument to Annotated must be a valid type (and will be in + the __origin__ field), the remaining arguments are kept as a tuple in + the __extra__ field. + + Details: + + - It's an error to call `Annotated` with less than two arguments. 
+ - Nested Annotated are flattened:: + + Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3] + + - Instantiating an annotated type is equivalent to instantiating the + underlying type:: + + Annotated[C, Ann1](5) == C(5) + + - Annotated can be used as a generic type alias:: + + Optimized = Annotated[T, runtime.Optimize()] + Optimized[int] == Annotated[int, runtime.Optimize()] + + OptimizedList = Annotated[List[T], runtime.Optimize()] + OptimizedList[int] == Annotated[List[int], runtime.Optimize()] + """ + + __slots__ = () + + def __new__(cls, *args, **kwargs): + raise TypeError("Type Annotated cannot be instantiated.") + + @typing._tp_cache + def __class_getitem__(cls, params): + if not isinstance(params, tuple) or len(params) < 2: + raise TypeError("Annotated[...] should be used " + "with at least two arguments (a type and an " + "annotation).") + allowed_special_forms = (ClassVar, Final) + if get_origin(params[0]) in allowed_special_forms: + origin = params[0] + else: + msg = "Annotated[t, ...]: t must be a type." + origin = typing._type_check(params[0], msg) + metadata = tuple(params[1:]) + return _AnnotatedAlias(origin, metadata) + + def __init_subclass__(cls, *args, **kwargs): + raise TypeError( + f"Cannot subclass {cls.__module__}.Annotated" + ) + +# Python 3.8 has get_origin() and get_args() but those implementations aren't +# Annotated-aware, so we can't use those. Python 3.9's versions don't support +# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do. +if sys.version_info[:2] >= (3, 10): + get_origin = typing.get_origin + get_args = typing.get_args +# 3.7-3.9 +else: + try: + # 3.9+ + from typing import _BaseGenericAlias + except ImportError: + _BaseGenericAlias = typing._GenericAlias + try: + # 3.9+ + from typing import GenericAlias as _typing_GenericAlias + except ImportError: + _typing_GenericAlias = typing._GenericAlias + + def get_origin(tp): + """Get the unsubscripted version of a type. 
+ + This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar + and Annotated. Return None for unsupported types. Examples:: + + get_origin(Literal[42]) is Literal + get_origin(int) is None + get_origin(ClassVar[int]) is ClassVar + get_origin(Generic) is Generic + get_origin(Generic[T]) is Generic + get_origin(Union[T, int]) is Union + get_origin(List[Tuple[T, T]][int]) == list + get_origin(P.args) is P + """ + if isinstance(tp, _AnnotatedAlias): + return Annotated + if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias, _BaseGenericAlias, + ParamSpecArgs, ParamSpecKwargs)): + return tp.__origin__ + if tp is typing.Generic: + return typing.Generic + return None + + def get_args(tp): + """Get type arguments with all substitutions performed. + + For unions, basic simplifications used by Union constructor are performed. + Examples:: + get_args(Dict[str, int]) == (str, int) + get_args(int) == () + get_args(Union[int, Union[T, int], str][int]) == (int, str) + get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int]) + get_args(Callable[[], T][int]) == ([], int) + """ + if isinstance(tp, _AnnotatedAlias): + return (tp.__origin__,) + tp.__metadata__ + if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias)): + if getattr(tp, "_special", False): + return () + res = tp.__args__ + if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis: + res = (list(res[:-1]), res[-1]) + return res + return () + + +# 3.10+ +if hasattr(typing, 'TypeAlias'): + TypeAlias = typing.TypeAlias +# 3.9 +elif sys.version_info[:2] >= (3, 9): + @_ExtensionsSpecialForm + def TypeAlias(self, parameters): + """Special marker indicating that an assignment should + be recognized as a proper type alias definition by type + checkers. + + For example:: + + Predicate: TypeAlias = Callable[..., bool] + + It's invalid when used anywhere except as in the example above. 
+ """ + raise TypeError(f"{self} is not subscriptable") +# 3.7-3.8 +else: + TypeAlias = _ExtensionsSpecialForm( + 'TypeAlias', + doc="""Special marker indicating that an assignment should + be recognized as a proper type alias definition by type + checkers. + + For example:: + + Predicate: TypeAlias = Callable[..., bool] + + It's invalid when used anywhere except as in the example + above.""" + ) + + +def _set_default(type_param, default): + if isinstance(default, (tuple, list)): + type_param.__default__ = tuple((typing._type_check(d, "Default must be a type") + for d in default)) + elif default != _marker: + type_param.__default__ = typing._type_check(default, "Default must be a type") + else: + type_param.__default__ = None + + +def _set_module(typevarlike): + # for pickling: + def_mod = _caller(depth=3) + if def_mod != 'typing_extensions': + typevarlike.__module__ = def_mod + + +class _DefaultMixin: + """Mixin for TypeVarLike defaults.""" + + __slots__ = () + __init__ = _set_default + + +# Classes using this metaclass must provide a _backported_typevarlike ClassVar +class _TypeVarLikeMeta(type): + def __instancecheck__(cls, __instance: Any) -> bool: + return isinstance(__instance, cls._backported_typevarlike) + + +# Add default and infer_variance parameters from PEP 696 and 695 +class TypeVar(metaclass=_TypeVarLikeMeta): + """Type variable.""" + + _backported_typevarlike = typing.TypeVar + + def __new__(cls, name, *constraints, bound=None, + covariant=False, contravariant=False, + default=_marker, infer_variance=False): + if hasattr(typing, "TypeAliasType"): + # PEP 695 implemented, can pass infer_variance to typing.TypeVar + typevar = typing.TypeVar(name, *constraints, bound=bound, + covariant=covariant, contravariant=contravariant, + infer_variance=infer_variance) + else: + typevar = typing.TypeVar(name, *constraints, bound=bound, + covariant=covariant, contravariant=contravariant) + if infer_variance and (covariant or contravariant): + raise 
ValueError("Variance cannot be specified with infer_variance.") + typevar.__infer_variance__ = infer_variance + _set_default(typevar, default) + _set_module(typevar) + return typevar + + def __init_subclass__(cls) -> None: + raise TypeError(f"type '{__name__}.TypeVar' is not an acceptable base type") + + +# Python 3.10+ has PEP 612 +if hasattr(typing, 'ParamSpecArgs'): + ParamSpecArgs = typing.ParamSpecArgs + ParamSpecKwargs = typing.ParamSpecKwargs +# 3.7-3.9 +else: + class _Immutable: + """Mixin to indicate that object should not be copied.""" + __slots__ = () + + def __copy__(self): + return self + + def __deepcopy__(self, memo): + return self + + class ParamSpecArgs(_Immutable): + """The args for a ParamSpec object. + + Given a ParamSpec object P, P.args is an instance of ParamSpecArgs. + + ParamSpecArgs objects have a reference back to their ParamSpec: + + P.args.__origin__ is P + + This type is meant for runtime introspection and has no special meaning to + static type checkers. + """ + def __init__(self, origin): + self.__origin__ = origin + + def __repr__(self): + return f"{self.__origin__.__name__}.args" + + def __eq__(self, other): + if not isinstance(other, ParamSpecArgs): + return NotImplemented + return self.__origin__ == other.__origin__ + + class ParamSpecKwargs(_Immutable): + """The kwargs for a ParamSpec object. + + Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs. + + ParamSpecKwargs objects have a reference back to their ParamSpec: + + P.kwargs.__origin__ is P + + This type is meant for runtime introspection and has no special meaning to + static type checkers. 
+ """ + def __init__(self, origin): + self.__origin__ = origin + + def __repr__(self): + return f"{self.__origin__.__name__}.kwargs" + + def __eq__(self, other): + if not isinstance(other, ParamSpecKwargs): + return NotImplemented + return self.__origin__ == other.__origin__ + +# 3.10+ +if hasattr(typing, 'ParamSpec'): + + # Add default parameter - PEP 696 + class ParamSpec(metaclass=_TypeVarLikeMeta): + """Parameter specification.""" + + _backported_typevarlike = typing.ParamSpec + + def __new__(cls, name, *, bound=None, + covariant=False, contravariant=False, + infer_variance=False, default=_marker): + if hasattr(typing, "TypeAliasType"): + # PEP 695 implemented, can pass infer_variance to typing.TypeVar + paramspec = typing.ParamSpec(name, bound=bound, + covariant=covariant, + contravariant=contravariant, + infer_variance=infer_variance) + else: + paramspec = typing.ParamSpec(name, bound=bound, + covariant=covariant, + contravariant=contravariant) + paramspec.__infer_variance__ = infer_variance + + _set_default(paramspec, default) + _set_module(paramspec) + return paramspec + + def __init_subclass__(cls) -> None: + raise TypeError(f"type '{__name__}.ParamSpec' is not an acceptable base type") + +# 3.7-3.9 +else: + + # Inherits from list as a workaround for Callable checks in Python < 3.9.2. + class ParamSpec(list, _DefaultMixin): + """Parameter specification variable. + + Usage:: + + P = ParamSpec('P') + + Parameter specification variables exist primarily for the benefit of static + type checkers. They are used to forward the parameter types of one + callable to another callable, a pattern commonly found in higher order + functions and decorators. They are only valid when used in ``Concatenate``, + or s the first argument to ``Callable``. In Python 3.10 and higher, + they are also supported in user-defined Generics at runtime. + See class Generic for more information on generic types. 
An + example for annotating a decorator:: + + T = TypeVar('T') + P = ParamSpec('P') + + def add_logging(f: Callable[P, T]) -> Callable[P, T]: + '''A type-safe decorator to add logging to a function.''' + def inner(*args: P.args, **kwargs: P.kwargs) -> T: + logging.info(f'{f.__name__} was called') + return f(*args, **kwargs) + return inner + + @add_logging + def add_two(x: float, y: float) -> float: + '''Add two numbers together.''' + return x + y + + Parameter specification variables defined with covariant=True or + contravariant=True can be used to declare covariant or contravariant + generic types. These keyword arguments are valid, but their actual semantics + are yet to be decided. See PEP 612 for details. + + Parameter specification variables can be introspected. e.g.: + + P.__name__ == 'T' + P.__bound__ == None + P.__covariant__ == False + P.__contravariant__ == False + + Note that only parameter specification variables defined in global scope can + be pickled. + """ + + # Trick Generic __parameters__. 
+ __class__ = typing.TypeVar + + @property + def args(self): + return ParamSpecArgs(self) + + @property + def kwargs(self): + return ParamSpecKwargs(self) + + def __init__(self, name, *, bound=None, covariant=False, contravariant=False, + infer_variance=False, default=_marker): + super().__init__([self]) + self.__name__ = name + self.__covariant__ = bool(covariant) + self.__contravariant__ = bool(contravariant) + self.__infer_variance__ = bool(infer_variance) + if bound: + self.__bound__ = typing._type_check(bound, 'Bound must be a type.') + else: + self.__bound__ = None + _DefaultMixin.__init__(self, default) + + # for pickling: + def_mod = _caller() + if def_mod != 'typing_extensions': + self.__module__ = def_mod + + def __repr__(self): + if self.__infer_variance__: + prefix = '' + elif self.__covariant__: + prefix = '+' + elif self.__contravariant__: + prefix = '-' + else: + prefix = '~' + return prefix + self.__name__ + + def __hash__(self): + return object.__hash__(self) + + def __eq__(self, other): + return self is other + + def __reduce__(self): + return self.__name__ + + # Hack to get typing._type_check to pass. + def __call__(self, *args, **kwargs): + pass + + +# 3.7-3.9 +if not hasattr(typing, 'Concatenate'): + # Inherits from list as a workaround for Callable checks in Python < 3.9.2. + class _ConcatenateGenericAlias(list): + + # Trick Generic into looking into this for __parameters__. + __class__ = typing._GenericAlias + + # Flag in 3.8. + _special = False + + def __init__(self, origin, args): + super().__init__(args) + self.__origin__ = origin + self.__args__ = args + + def __repr__(self): + _type_repr = typing._type_repr + return (f'{_type_repr(self.__origin__)}' + f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]') + + def __hash__(self): + return hash((self.__origin__, self.__args__)) + + # Hack to get typing._type_check to pass in Generic. 
+ def __call__(self, *args, **kwargs): + pass + + @property + def __parameters__(self): + return tuple( + tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec)) + ) + + +# 3.7-3.9 +@typing._tp_cache +def _concatenate_getitem(self, parameters): + if parameters == (): + raise TypeError("Cannot take a Concatenate of no types.") + if not isinstance(parameters, tuple): + parameters = (parameters,) + if not isinstance(parameters[-1], ParamSpec): + raise TypeError("The last parameter to Concatenate should be a " + "ParamSpec variable.") + msg = "Concatenate[arg, ...]: each arg must be a type." + parameters = tuple(typing._type_check(p, msg) for p in parameters) + return _ConcatenateGenericAlias(self, parameters) + + +# 3.10+ +if hasattr(typing, 'Concatenate'): + Concatenate = typing.Concatenate + _ConcatenateGenericAlias = typing._ConcatenateGenericAlias # noqa: F811 +# 3.9 +elif sys.version_info[:2] >= (3, 9): + @_ExtensionsSpecialForm + def Concatenate(self, parameters): + """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a + higher order function which adds, removes or transforms parameters of a + callable. + + For example:: + + Callable[Concatenate[int, P], int] + + See PEP 612 for detailed information. + """ + return _concatenate_getitem(self, parameters) +# 3.7-8 +else: + class _ConcatenateForm(_ExtensionsSpecialForm, _root=True): + def __getitem__(self, parameters): + return _concatenate_getitem(self, parameters) + + Concatenate = _ConcatenateForm( + 'Concatenate', + doc="""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a + higher order function which adds, removes or transforms parameters of a + callable. + + For example:: + + Callable[Concatenate[int, P], int] + + See PEP 612 for detailed information. 
+ """) + +# 3.10+ +if hasattr(typing, 'TypeGuard'): + TypeGuard = typing.TypeGuard +# 3.9 +elif sys.version_info[:2] >= (3, 9): + @_ExtensionsSpecialForm + def TypeGuard(self, parameters): + """Special typing form used to annotate the return type of a user-defined + type guard function. ``TypeGuard`` only accepts a single type argument. + At runtime, functions marked this way should return a boolean. + + ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static + type checkers to determine a more precise type of an expression within a + program's code flow. Usually type narrowing is done by analyzing + conditional code flow and applying the narrowing to a block of code. The + conditional expression here is sometimes referred to as a "type guard". + + Sometimes it would be convenient to use a user-defined boolean function + as a type guard. Such a function should use ``TypeGuard[...]`` as its + return type to alert static type checkers to this intention. + + Using ``-> TypeGuard`` tells the static type checker that for a given + function: + + 1. The return value is a boolean. + 2. If the return value is ``True``, the type of its argument + is the type inside ``TypeGuard``. + + For example:: + + def is_str(val: Union[str, float]): + # "isinstance" type guard + if isinstance(val, str): + # Type of ``val`` is narrowed to ``str`` + ... + else: + # Else, type of ``val`` is narrowed to ``float``. + ... + + Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower + form of ``TypeA`` (it can even be a wider form) and this may lead to + type-unsafe results. The main reason is to allow for things like + narrowing ``List[object]`` to ``List[str]`` even though the latter is not + a subtype of the former, since ``List`` is invariant. The responsibility of + writing type-safe type guards is left to the user. + + ``TypeGuard`` also works with type variables. For more information, see + PEP 647 (User-Defined Type Guards). 
+ """ + item = typing._type_check(parameters, f'{self} accepts only a single type.') + return typing._GenericAlias(self, (item,)) +# 3.7-3.8 +else: + class _TypeGuardForm(_ExtensionsSpecialForm, _root=True): + def __getitem__(self, parameters): + item = typing._type_check(parameters, + f'{self._name} accepts only a single type') + return typing._GenericAlias(self, (item,)) + + TypeGuard = _TypeGuardForm( + 'TypeGuard', + doc="""Special typing form used to annotate the return type of a user-defined + type guard function. ``TypeGuard`` only accepts a single type argument. + At runtime, functions marked this way should return a boolean. + + ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static + type checkers to determine a more precise type of an expression within a + program's code flow. Usually type narrowing is done by analyzing + conditional code flow and applying the narrowing to a block of code. The + conditional expression here is sometimes referred to as a "type guard". + + Sometimes it would be convenient to use a user-defined boolean function + as a type guard. Such a function should use ``TypeGuard[...]`` as its + return type to alert static type checkers to this intention. + + Using ``-> TypeGuard`` tells the static type checker that for a given + function: + + 1. The return value is a boolean. + 2. If the return value is ``True``, the type of its argument + is the type inside ``TypeGuard``. + + For example:: + + def is_str(val: Union[str, float]): + # "isinstance" type guard + if isinstance(val, str): + # Type of ``val`` is narrowed to ``str`` + ... + else: + # Else, type of ``val`` is narrowed to ``float``. + ... + + Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower + form of ``TypeA`` (it can even be a wider form) and this may lead to + type-unsafe results. 
The main reason is to allow for things like + narrowing ``List[object]`` to ``List[str]`` even though the latter is not + a subtype of the former, since ``List`` is invariant. The responsibility of + writing type-safe type guards is left to the user. + + ``TypeGuard`` also works with type variables. For more information, see + PEP 647 (User-Defined Type Guards). + """) + + +# Vendored from cpython typing._SpecialFrom +class _SpecialForm(typing._Final, _root=True): + __slots__ = ('_name', '__doc__', '_getitem') + + def __init__(self, getitem): + self._getitem = getitem + self._name = getitem.__name__ + self.__doc__ = getitem.__doc__ + + def __getattr__(self, item): + if item in {'__name__', '__qualname__'}: + return self._name + + raise AttributeError(item) + + def __mro_entries__(self, bases): + raise TypeError(f"Cannot subclass {self!r}") + + def __repr__(self): + return f'typing_extensions.{self._name}' + + def __reduce__(self): + return self._name + + def __call__(self, *args, **kwds): + raise TypeError(f"Cannot instantiate {self!r}") + + def __or__(self, other): + return typing.Union[self, other] + + def __ror__(self, other): + return typing.Union[other, self] + + def __instancecheck__(self, obj): + raise TypeError(f"{self} cannot be used with isinstance()") + + def __subclasscheck__(self, cls): + raise TypeError(f"{self} cannot be used with issubclass()") + + @typing._tp_cache + def __getitem__(self, parameters): + return self._getitem(self, parameters) + + +if hasattr(typing, "LiteralString"): + LiteralString = typing.LiteralString +else: + @_SpecialForm + def LiteralString(self, params): + """Represents an arbitrary literal string. + + Example:: + + from typing_extensions import LiteralString + + def query(sql: LiteralString) -> ...: + ... + + query("SELECT * FROM table") # ok + query(f"SELECT * FROM {input()}") # not ok + + See PEP 675 for details. 
+ + """ + raise TypeError(f"{self} is not subscriptable") + + +if hasattr(typing, "Self"): + Self = typing.Self +else: + @_SpecialForm + def Self(self, params): + """Used to spell the type of "self" in classes. + + Example:: + + from typing import Self + + class ReturnsSelf: + def parse(self, data: bytes) -> Self: + ... + return self + + """ + + raise TypeError(f"{self} is not subscriptable") + + +if hasattr(typing, "Never"): + Never = typing.Never +else: + @_SpecialForm + def Never(self, params): + """The bottom type, a type that has no members. + + This can be used to define a function that should never be + called, or a function that never returns:: + + from typing_extensions import Never + + def never_call_me(arg: Never) -> None: + pass + + def int_or_str(arg: int | str) -> None: + never_call_me(arg) # type checker error + match arg: + case int(): + print("It's an int") + case str(): + print("It's a str") + case _: + never_call_me(arg) # ok, arg is of type Never + + """ + + raise TypeError(f"{self} is not subscriptable") + + +if hasattr(typing, 'Required'): + Required = typing.Required + NotRequired = typing.NotRequired +elif sys.version_info[:2] >= (3, 9): + @_ExtensionsSpecialForm + def Required(self, parameters): + """A special typing construct to mark a key of a total=False TypedDict + as required. For example: + + class Movie(TypedDict, total=False): + title: Required[str] + year: int + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + + There is no runtime checking that a required key is actually provided + when instantiating a related TypedDict. + """ + item = typing._type_check(parameters, f'{self._name} accepts only a single type.') + return typing._GenericAlias(self, (item,)) + + @_ExtensionsSpecialForm + def NotRequired(self, parameters): + """A special typing construct to mark a key of a TypedDict as + potentially missing. 
For example: + + class Movie(TypedDict): + title: str + year: NotRequired[int] + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + """ + item = typing._type_check(parameters, f'{self._name} accepts only a single type.') + return typing._GenericAlias(self, (item,)) + +else: + class _RequiredForm(_ExtensionsSpecialForm, _root=True): + def __getitem__(self, parameters): + item = typing._type_check(parameters, + f'{self._name} accepts only a single type.') + return typing._GenericAlias(self, (item,)) + + Required = _RequiredForm( + 'Required', + doc="""A special typing construct to mark a key of a total=False TypedDict + as required. For example: + + class Movie(TypedDict, total=False): + title: Required[str] + year: int + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + + There is no runtime checking that a required key is actually provided + when instantiating a related TypedDict. + """) + NotRequired = _RequiredForm( + 'NotRequired', + doc="""A special typing construct to mark a key of a TypedDict as + potentially missing. For example: + + class Movie(TypedDict): + title: str + year: NotRequired[int] + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + """) + + +_UNPACK_DOC = """\ +Type unpack operator. + +The type unpack operator takes the child types from some container type, +such as `tuple[int, str]` or a `TypeVarTuple`, and 'pulls them out'. For +example: + + # For some generic class `Foo`: + Foo[Unpack[tuple[int, str]]] # Equivalent to Foo[int, str] + + Ts = TypeVarTuple('Ts') + # Specifies that `Bar` is generic in an arbitrary number of types. + # (Think of `Ts` as a tuple of an arbitrary number of individual + # `TypeVar`s, which the `Unpack` is 'pulling out' directly into the + # `Generic[]`.) + class Bar(Generic[Unpack[Ts]]): ... 
+ Bar[int] # Valid + Bar[int, str] # Also valid + +From Python 3.11, this can also be done using the `*` operator: + + Foo[*tuple[int, str]] + class Bar(Generic[*Ts]): ... + +The operator can also be used along with a `TypedDict` to annotate +`**kwargs` in a function signature. For instance: + + class Movie(TypedDict): + name: str + year: int + + # This function expects two keyword arguments - *name* of type `str` and + # *year* of type `int`. + def foo(**kwargs: Unpack[Movie]): ... + +Note that there is only some runtime checking of this operator. Not +everything the runtime allows may be accepted by static type checkers. + +For more information, see PEP 646 and PEP 692. +""" + + +if sys.version_info >= (3, 12): # PEP 692 changed the repr of Unpack[] + Unpack = typing.Unpack + + def _is_unpack(obj): + return get_origin(obj) is Unpack + +elif sys.version_info[:2] >= (3, 9): + class _UnpackSpecialForm(_ExtensionsSpecialForm, _root=True): + def __init__(self, getitem): + super().__init__(getitem) + self.__doc__ = _UNPACK_DOC + + class _UnpackAlias(typing._GenericAlias, _root=True): + __class__ = typing.TypeVar + + @_UnpackSpecialForm + def Unpack(self, parameters): + item = typing._type_check(parameters, f'{self._name} accepts only a single type.') + return _UnpackAlias(self, (item,)) + + def _is_unpack(obj): + return isinstance(obj, _UnpackAlias) + +else: + class _UnpackAlias(typing._GenericAlias, _root=True): + __class__ = typing.TypeVar + + class _UnpackForm(_ExtensionsSpecialForm, _root=True): + def __getitem__(self, parameters): + item = typing._type_check(parameters, + f'{self._name} accepts only a single type.') + return _UnpackAlias(self, (item,)) + + Unpack = _UnpackForm('Unpack', doc=_UNPACK_DOC) + + def _is_unpack(obj): + return isinstance(obj, _UnpackAlias) + + +if hasattr(typing, "TypeVarTuple"): # 3.11+ + + # Add default parameter - PEP 696 + class TypeVarTuple(metaclass=_TypeVarLikeMeta): + """Type variable tuple.""" + + _backported_typevarlike = 
typing.TypeVarTuple + + def __new__(cls, name, *, default=_marker): + tvt = typing.TypeVarTuple(name) + _set_default(tvt, default) + _set_module(tvt) + return tvt + + def __init_subclass__(self, *args, **kwds): + raise TypeError("Cannot subclass special typing classes") + +else: + class TypeVarTuple(_DefaultMixin): + """Type variable tuple. + + Usage:: + + Ts = TypeVarTuple('Ts') + + In the same way that a normal type variable is a stand-in for a single + type such as ``int``, a type variable *tuple* is a stand-in for a *tuple* + type such as ``Tuple[int, str]``. + + Type variable tuples can be used in ``Generic`` declarations. + Consider the following example:: + + class Array(Generic[*Ts]): ... + + The ``Ts`` type variable tuple here behaves like ``tuple[T1, T2]``, + where ``T1`` and ``T2`` are type variables. To use these type variables + as type parameters of ``Array``, we must *unpack* the type variable tuple using + the star operator: ``*Ts``. The signature of ``Array`` then behaves + as if we had simply written ``class Array(Generic[T1, T2]): ...``. + In contrast to ``Generic[T1, T2]``, however, ``Generic[*Shape]`` allows + us to parameterise the class with an *arbitrary* number of type parameters. + + Type variable tuples can be used anywhere a normal ``TypeVar`` can. + This includes class definitions, as shown above, as well as function + signatures and variable annotations:: + + class Array(Generic[*Ts]): + + def __init__(self, shape: Tuple[*Ts]): + self._shape: Tuple[*Ts] = shape + + def get_shape(self) -> Tuple[*Ts]: + return self._shape + + shape = (Height(480), Width(640)) + x: Array[Height, Width] = Array(shape) + y = abs(x) # Inferred type is Array[Height, Width] + z = x + x # ... is Array[Height, Width] + x.get_shape() # ... is tuple[Height, Width] + + """ + + # Trick Generic __parameters__. 
+ __class__ = typing.TypeVar + + def __iter__(self): + yield self.__unpacked__ + + def __init__(self, name, *, default=_marker): + self.__name__ = name + _DefaultMixin.__init__(self, default) + + # for pickling: + def_mod = _caller() + if def_mod != 'typing_extensions': + self.__module__ = def_mod + + self.__unpacked__ = Unpack[self] + + def __repr__(self): + return self.__name__ + + def __hash__(self): + return object.__hash__(self) + + def __eq__(self, other): + return self is other + + def __reduce__(self): + return self.__name__ + + def __init_subclass__(self, *args, **kwds): + if '_root' not in kwds: + raise TypeError("Cannot subclass special typing classes") + + +if hasattr(typing, "reveal_type"): + reveal_type = typing.reveal_type +else: + def reveal_type(__obj: T) -> T: + """Reveal the inferred type of a variable. + + When a static type checker encounters a call to ``reveal_type()``, + it will emit the inferred type of the argument:: + + x: int = 1 + reveal_type(x) + + Running a static type checker (e.g., ``mypy``) on this example + will produce output similar to 'Revealed type is "builtins.int"'. + + At runtime, the function prints the runtime type of the + argument and returns it unchanged. + + """ + print(f"Runtime type is {type(__obj).__name__!r}", file=sys.stderr) + return __obj + + +if hasattr(typing, "assert_never"): + assert_never = typing.assert_never +else: + def assert_never(__arg: Never) -> Never: + """Assert to the type checker that a line of code is unreachable. + + Example:: + + def int_or_str(arg: int | str) -> None: + match arg: + case int(): + print("It's an int") + case str(): + print("It's a str") + case _: + assert_never(arg) + + If a type checker finds that a call to assert_never() is + reachable, it will emit an error. + + At runtime, this throws an exception when called. 
+ + """ + raise AssertionError("Expected code to be unreachable") + + +if sys.version_info >= (3, 12): + # dataclass_transform exists in 3.11 but lacks the frozen_default parameter + dataclass_transform = typing.dataclass_transform +else: + def dataclass_transform( + *, + eq_default: bool = True, + order_default: bool = False, + kw_only_default: bool = False, + frozen_default: bool = False, + field_specifiers: typing.Tuple[ + typing.Union[typing.Type[typing.Any], typing.Callable[..., typing.Any]], + ... + ] = (), + **kwargs: typing.Any, + ) -> typing.Callable[[T], T]: + """Decorator that marks a function, class, or metaclass as providing + dataclass-like behavior. + + Example: + + from typing_extensions import dataclass_transform + + _T = TypeVar("_T") + + # Used on a decorator function + @dataclass_transform() + def create_model(cls: type[_T]) -> type[_T]: + ... + return cls + + @create_model + class CustomerModel: + id: int + name: str + + # Used on a base class + @dataclass_transform() + class ModelBase: ... + + class CustomerModel(ModelBase): + id: int + name: str + + # Used on a metaclass + @dataclass_transform() + class ModelMeta(type): ... + + class ModelBase(metaclass=ModelMeta): ... + + class CustomerModel(ModelBase): + id: int + name: str + + Each of the ``CustomerModel`` classes defined in this example will now + behave similarly to a dataclass created with the ``@dataclasses.dataclass`` + decorator. For example, the type checker will synthesize an ``__init__`` + method. + + The arguments to this decorator can be used to customize this behavior: + - ``eq_default`` indicates whether the ``eq`` parameter is assumed to be + True or False if it is omitted by the caller. + - ``order_default`` indicates whether the ``order`` parameter is + assumed to be True or False if it is omitted by the caller. + - ``kw_only_default`` indicates whether the ``kw_only`` parameter is + assumed to be True or False if it is omitted by the caller. 
+ - ``frozen_default`` indicates whether the ``frozen`` parameter is + assumed to be True or False if it is omitted by the caller. + - ``field_specifiers`` specifies a static list of supported classes + or functions that describe fields, similar to ``dataclasses.field()``. + + At runtime, this decorator records its arguments in the + ``__dataclass_transform__`` attribute on the decorated object. + + See PEP 681 for details. + + """ + def decorator(cls_or_fn): + cls_or_fn.__dataclass_transform__ = { + "eq_default": eq_default, + "order_default": order_default, + "kw_only_default": kw_only_default, + "frozen_default": frozen_default, + "field_specifiers": field_specifiers, + "kwargs": kwargs, + } + return cls_or_fn + return decorator + + +if hasattr(typing, "override"): + override = typing.override +else: + _F = typing.TypeVar("_F", bound=typing.Callable[..., typing.Any]) + + def override(__arg: _F) -> _F: + """Indicate that a method is intended to override a method in a base class. + + Usage: + + class Base: + def method(self) -> None: ... + pass + + class Child(Base): + @override + def method(self) -> None: + super().method() + + When this decorator is applied to a method, the type checker will + validate that it overrides a method with the same name on a base class. + This helps prevent bugs that may occur when a base class is changed + without an equivalent change to a child class. + + There is no runtime checking of these properties. The decorator + sets the ``__override__`` attribute to ``True`` on the decorated object + to allow runtime introspection. + + See PEP 698 for details. + + """ + try: + __arg.__override__ = True + except (AttributeError, TypeError): + # Skip the attribute silently if it is not writable. + # AttributeError happens if the object has __slots__ or a + # read-only property, TypeError if it's a builtin class. 
+ pass + return __arg + + +if hasattr(typing, "deprecated"): + deprecated = typing.deprecated +else: + _T = typing.TypeVar("_T") + + def deprecated( + __msg: str, + *, + category: typing.Optional[typing.Type[Warning]] = DeprecationWarning, + stacklevel: int = 1, + ) -> typing.Callable[[_T], _T]: + """Indicate that a class, function or overload is deprecated. + + Usage: + + @deprecated("Use B instead") + class A: + pass + + @deprecated("Use g instead") + def f(): + pass + + @overload + @deprecated("int support is deprecated") + def g(x: int) -> int: ... + @overload + def g(x: str) -> int: ... + + When this decorator is applied to an object, the type checker + will generate a diagnostic on usage of the deprecated object. + + The warning specified by ``category`` will be emitted on use + of deprecated objects. For functions, that happens on calls; + for classes, on instantiation. If the ``category`` is ``None``, + no warning is emitted. The ``stacklevel`` determines where the + warning is emitted. If it is ``1`` (the default), the warning + is emitted at the direct caller of the deprecated object; if it + is higher, it is emitted further up the stack. + + The decorator sets the ``__deprecated__`` + attribute on the decorated object to the deprecation message + passed to the decorator. If applied to an overload, the decorator + must be after the ``@overload`` decorator for the attribute to + exist on the overload as returned by ``get_overloads()``. + + See PEP 702 for details. + + """ + def decorator(__arg: _T) -> _T: + if category is None: + __arg.__deprecated__ = __msg + return __arg + elif isinstance(__arg, type): + original_new = __arg.__new__ + has_init = __arg.__init__ is not object.__init__ + + @functools.wraps(original_new) + def __new__(cls, *args, **kwargs): + warnings.warn(__msg, category=category, stacklevel=stacklevel + 1) + if original_new is not object.__new__: + return original_new(cls, *args, **kwargs) + # Mirrors a similar check in object.__new__. 
+ elif not has_init and (args or kwargs): + raise TypeError(f"{cls.__name__}() takes no arguments") + else: + return original_new(cls) + + __arg.__new__ = staticmethod(__new__) + __arg.__deprecated__ = __new__.__deprecated__ = __msg + return __arg + elif callable(__arg): + @functools.wraps(__arg) + def wrapper(*args, **kwargs): + warnings.warn(__msg, category=category, stacklevel=stacklevel + 1) + return __arg(*args, **kwargs) + + __arg.__deprecated__ = wrapper.__deprecated__ = __msg + return wrapper + else: + raise TypeError( + "@deprecated decorator with non-None category must be applied to " + f"a class or callable, not {__arg!r}" + ) + + return decorator + + +# We have to do some monkey patching to deal with the dual nature of +# Unpack/TypeVarTuple: +# - We want Unpack to be a kind of TypeVar so it gets accepted in +# Generic[Unpack[Ts]] +# - We want it to *not* be treated as a TypeVar for the purposes of +# counting generic parameters, so that when we subscript a generic, +# the runtime doesn't try to substitute the Unpack with the subscripted type. +if not hasattr(typing, "TypeVarTuple"): + typing._collect_type_vars = _collect_type_vars + typing._check_generic = _check_generic + + +# Backport typing.NamedTuple as it exists in Python 3.12. +# In 3.11, the ability to define generic `NamedTuple`s was supported. +# This was explicitly disallowed in 3.9-3.10, and only half-worked in <=3.8. 
+# On 3.12, we added __orig_bases__ to call-based NamedTuples +# On 3.13, we deprecated kwargs-based NamedTuples +if sys.version_info >= (3, 13): + NamedTuple = typing.NamedTuple +else: + def _make_nmtuple(name, types, module, defaults=()): + fields = [n for n, t in types] + annotations = {n: typing._type_check(t, f"field {n} annotation must be a type") + for n, t in types} + nm_tpl = collections.namedtuple(name, fields, + defaults=defaults, module=module) + nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = annotations + # The `_field_types` attribute was removed in 3.9; + # in earlier versions, it is the same as the `__annotations__` attribute + if sys.version_info < (3, 9): + nm_tpl._field_types = annotations + return nm_tpl + + _prohibited_namedtuple_fields = typing._prohibited + _special_namedtuple_fields = frozenset({'__module__', '__name__', '__annotations__'}) + + class _NamedTupleMeta(type): + def __new__(cls, typename, bases, ns): + assert _NamedTuple in bases + for base in bases: + if base is not _NamedTuple and base is not typing.Generic: + raise TypeError( + 'can only inherit from a NamedTuple type and Generic') + bases = tuple(tuple if base is _NamedTuple else base for base in bases) + types = ns.get('__annotations__', {}) + default_names = [] + for field_name in types: + if field_name in ns: + default_names.append(field_name) + elif default_names: + raise TypeError(f"Non-default namedtuple field {field_name} " + f"cannot follow default field" + f"{'s' if len(default_names) > 1 else ''} " + f"{', '.join(default_names)}") + nm_tpl = _make_nmtuple( + typename, types.items(), + defaults=[ns[n] for n in default_names], + module=ns['__module__'] + ) + nm_tpl.__bases__ = bases + if typing.Generic in bases: + if hasattr(typing, '_generic_class_getitem'): # 3.12+ + nm_tpl.__class_getitem__ = classmethod(typing._generic_class_getitem) + else: + class_getitem = typing.Generic.__class_getitem__.__func__ + nm_tpl.__class_getitem__ = 
classmethod(class_getitem) + # update from user namespace without overriding special namedtuple attributes + for key in ns: + if key in _prohibited_namedtuple_fields: + raise AttributeError("Cannot overwrite NamedTuple attribute " + key) + elif key not in _special_namedtuple_fields and key not in nm_tpl._fields: + setattr(nm_tpl, key, ns[key]) + if typing.Generic in bases: + nm_tpl.__init_subclass__() + return nm_tpl + + _NamedTuple = type.__new__(_NamedTupleMeta, 'NamedTuple', (), {}) + + def _namedtuple_mro_entries(bases): + assert NamedTuple in bases + return (_NamedTuple,) + + @_ensure_subclassable(_namedtuple_mro_entries) + def NamedTuple(__typename, __fields=_marker, **kwargs): + """Typed version of namedtuple. + + Usage:: + + class Employee(NamedTuple): + name: str + id: int + + This is equivalent to:: + + Employee = collections.namedtuple('Employee', ['name', 'id']) + + The resulting class has an extra __annotations__ attribute, giving a + dict that maps field names to types. (The field names are also in + the _fields attribute, which is part of the namedtuple API.) + An alternative equivalent functional syntax is also accepted:: + + Employee = NamedTuple('Employee', [('name', str), ('id', int)]) + """ + if __fields is _marker: + if kwargs: + deprecated_thing = "Creating NamedTuple classes using keyword arguments" + deprecation_msg = ( + "{name} is deprecated and will be disallowed in Python {remove}. " + "Use the class-based or functional syntax instead." + ) + else: + deprecated_thing = "Failing to pass a value for the 'fields' parameter" + example = f"`{__typename} = NamedTuple({__typename!r}, [])`" + deprecation_msg = ( + "{name} is deprecated and will be disallowed in Python {remove}. " + "To create a NamedTuple class with 0 fields " + "using the functional syntax, " + "pass an empty list, e.g. " + ) + example + "." 
+ elif __fields is None: + if kwargs: + raise TypeError( + "Cannot pass `None` as the 'fields' parameter " + "and also specify fields using keyword arguments" + ) + else: + deprecated_thing = "Passing `None` as the 'fields' parameter" + example = f"`{__typename} = NamedTuple({__typename!r}, [])`" + deprecation_msg = ( + "{name} is deprecated and will be disallowed in Python {remove}. " + "To create a NamedTuple class with 0 fields " + "using the functional syntax, " + "pass an empty list, e.g. " + ) + example + "." + elif kwargs: + raise TypeError("Either list of fields or keywords" + " can be provided to NamedTuple, not both") + if __fields is _marker or __fields is None: + warnings.warn( + deprecation_msg.format(name=deprecated_thing, remove="3.15"), + DeprecationWarning, + stacklevel=2, + ) + __fields = kwargs.items() + nt = _make_nmtuple(__typename, __fields, module=_caller()) + nt.__orig_bases__ = (NamedTuple,) + return nt + + # On 3.8+, alter the signature so that it matches typing.NamedTuple. + # The signature of typing.NamedTuple on >=3.8 is invalid syntax in Python 3.7, + # so just leave the signature as it is on 3.7. + if sys.version_info >= (3, 8): + _new_signature = '(typename, fields=None, /, **kwargs)' + if isinstance(NamedTuple, _types.FunctionType): + NamedTuple.__text_signature__ = _new_signature + else: + NamedTuple.__call__.__text_signature__ = _new_signature + + +if hasattr(collections.abc, "Buffer"): + Buffer = collections.abc.Buffer +else: + class Buffer(abc.ABC): + """Base class for classes that implement the buffer protocol. + + The buffer protocol allows Python objects to expose a low-level + memory buffer interface. Before Python 3.12, it is not possible + to implement the buffer protocol in pure Python code, or even + to check whether a class implements the buffer protocol. 
In + Python 3.12 and higher, the ``__buffer__`` method allows access + to the buffer protocol from Python code, and the + ``collections.abc.Buffer`` ABC allows checking whether a class + implements the buffer protocol. + + To indicate support for the buffer protocol in earlier versions, + inherit from this ABC, either in a stub file or at runtime, + or use ABC registration. This ABC provides no methods, because + there is no Python-accessible methods shared by pre-3.12 buffer + classes. It is useful primarily for static checks. + + """ + + # As a courtesy, register the most common stdlib buffer classes. + Buffer.register(memoryview) + Buffer.register(bytearray) + Buffer.register(bytes) + + +# Backport of types.get_original_bases, available on 3.12+ in CPython +if hasattr(_types, "get_original_bases"): + get_original_bases = _types.get_original_bases +else: + def get_original_bases(__cls): + """Return the class's "original" bases prior to modification by `__mro_entries__`. + + Examples:: + + from typing import TypeVar, Generic + from typing_extensions import NamedTuple, TypedDict + + T = TypeVar("T") + class Foo(Generic[T]): ... + class Bar(Foo[int], float): ... + class Baz(list[str]): ... 
+ Eggs = NamedTuple("Eggs", [("a", int), ("b", str)]) + Spam = TypedDict("Spam", {"a": int, "b": str}) + + assert get_original_bases(Bar) == (Foo[int], float) + assert get_original_bases(Baz) == (list[str],) + assert get_original_bases(Eggs) == (NamedTuple,) + assert get_original_bases(Spam) == (TypedDict,) + assert get_original_bases(int) == (object,) + """ + try: + return __cls.__orig_bases__ + except AttributeError: + try: + return __cls.__bases__ + except AttributeError: + raise TypeError( + f'Expected an instance of type, not {type(__cls).__name__!r}' + ) from None + + +# NewType is a class on Python 3.10+, making it pickleable +# The error message for subclassing instances of NewType was improved on 3.11+ +if sys.version_info >= (3, 11): + NewType = typing.NewType +else: + class NewType: + """NewType creates simple unique types with almost zero + runtime overhead. NewType(name, tp) is considered a subtype of tp + by static type checkers. At runtime, NewType(name, tp) returns + a dummy callable that simply returns its argument. Usage:: + UserId = NewType('UserId', int) + def name_by_id(user_id: UserId) -> str: + ... + UserId('user') # Fails type check + name_by_id(42) # Fails type check + name_by_id(UserId(42)) # OK + num = UserId(5) + 1 # type: int + """ + + def __call__(self, obj): + return obj + + def __init__(self, name, tp): + self.__qualname__ = name + if '.' in name: + name = name.rpartition('.')[-1] + self.__name__ = name + self.__supertype__ = tp + def_mod = _caller() + if def_mod != 'typing_extensions': + self.__module__ = def_mod + + def __mro_entries__(self, bases): + # We defined __mro_entries__ to get a better error message + # if a user attempts to subclass a NewType instance. bpo-46170 + supercls_name = self.__name__ + + class Dummy: + def __init_subclass__(cls): + subcls_name = cls.__name__ + raise TypeError( + f"Cannot subclass an instance of NewType. 
" + f"Perhaps you were looking for: " + f"`{subcls_name} = NewType({subcls_name!r}, {supercls_name})`" + ) + + return (Dummy,) + + def __repr__(self): + return f'{self.__module__}.{self.__qualname__}' + + def __reduce__(self): + return self.__qualname__ + + if sys.version_info >= (3, 10): + # PEP 604 methods + # It doesn't make sense to have these methods on Python <3.10 + + def __or__(self, other): + return typing.Union[self, other] + + def __ror__(self, other): + return typing.Union[other, self] + + +if hasattr(typing, "TypeAliasType"): + TypeAliasType = typing.TypeAliasType +else: + def _is_unionable(obj): + """Corresponds to is_unionable() in unionobject.c in CPython.""" + return obj is None or isinstance(obj, ( + type, + _types.GenericAlias, + _types.UnionType, + TypeAliasType, + )) + + class TypeAliasType: + """Create named, parameterized type aliases. + + This provides a backport of the new `type` statement in Python 3.12: + + type ListOrSet[T] = list[T] | set[T] + + is equivalent to: + + T = TypeVar("T") + ListOrSet = TypeAliasType("ListOrSet", list[T] | set[T], type_params=(T,)) + + The name ListOrSet can then be used as an alias for the type it refers to. + + The type_params argument should contain all the type parameters used + in the value of the type alias. If the alias is not generic, this + argument is omitted. + + Static type checkers should only support type aliases declared using + TypeAliasType that follow these rules: + + - The first argument (the name) must be a string literal. + - The TypeAliasType instance must be immediately assigned to a variable + of the same name. (For example, 'X = TypeAliasType("Y", int)' is invalid, + as is 'X, Y = TypeAliasType("X", int), TypeAliasType("Y", int)'). 
+ + """ + + def __init__(self, name: str, value, *, type_params=()): + if not isinstance(name, str): + raise TypeError("TypeAliasType name must be a string") + self.__value__ = value + self.__type_params__ = type_params + + parameters = [] + for type_param in type_params: + if isinstance(type_param, TypeVarTuple): + parameters.extend(type_param) + else: + parameters.append(type_param) + self.__parameters__ = tuple(parameters) + def_mod = _caller() + if def_mod != 'typing_extensions': + self.__module__ = def_mod + # Setting this attribute closes the TypeAliasType from further modification + self.__name__ = name + + def __setattr__(self, __name: str, __value: object) -> None: + if hasattr(self, "__name__"): + self._raise_attribute_error(__name) + super().__setattr__(__name, __value) + + def __delattr__(self, __name: str) -> Never: + self._raise_attribute_error(__name) + + def _raise_attribute_error(self, name: str) -> Never: + # Match the Python 3.12 error messages exactly + if name == "__name__": + raise AttributeError("readonly attribute") + elif name in {"__value__", "__type_params__", "__parameters__", "__module__"}: + raise AttributeError( + f"attribute '{name}' of 'typing.TypeAliasType' objects " + "is not writable" + ) + else: + raise AttributeError( + f"'typing.TypeAliasType' object has no attribute '{name}'" + ) + + def __repr__(self) -> str: + return self.__name__ + + def __getitem__(self, parameters): + if not isinstance(parameters, tuple): + parameters = (parameters,) + parameters = [ + typing._type_check( + item, f'Subscripting {self.__name__} requires a type.' + ) + for item in parameters + ] + return typing._GenericAlias(self, tuple(parameters)) + + def __reduce__(self): + return self.__name__ + + def __init_subclass__(cls, *args, **kwargs): + raise TypeError( + "type 'typing_extensions.TypeAliasType' is not an acceptable base type" + ) + + # The presence of this method convinces typing._type_check + # that TypeAliasTypes are types. 
+ def __call__(self): + raise TypeError("Type alias is not callable") + + if sys.version_info >= (3, 10): + def __or__(self, right): + # For forward compatibility with 3.12, reject Unions + # that are not accepted by the built-in Union. + if not _is_unionable(right): + return NotImplemented + return typing.Union[self, right] + + def __ror__(self, left): + if not _is_unionable(left): + return NotImplemented + return typing.Union[left, self] + + +if hasattr(typing, "is_protocol"): + is_protocol = typing.is_protocol + get_protocol_members = typing.get_protocol_members +else: + def is_protocol(__tp: type) -> bool: + """Return True if the given type is a Protocol. + + Example:: + + >>> from typing_extensions import Protocol, is_protocol + >>> class P(Protocol): + ... def a(self) -> str: ... + ... b: int + >>> is_protocol(P) + True + >>> is_protocol(int) + False + """ + return ( + isinstance(__tp, type) + and getattr(__tp, '_is_protocol', False) + and __tp is not Protocol + and __tp is not getattr(typing, "Protocol", object()) + ) + + def get_protocol_members(__tp: type) -> typing.FrozenSet[str]: + """Return the set of members defined in a Protocol. + + Example:: + + >>> from typing_extensions import Protocol, get_protocol_members + >>> class P(Protocol): + ... def a(self) -> str: ... + ... b: int + >>> get_protocol_members(P) + frozenset({'a', 'b'}) + + Raise a TypeError for arguments that are not Protocols. + """ + if not is_protocol(__tp): + raise TypeError(f'{__tp!r} is not a Protocol') + if hasattr(__tp, '__protocol_attrs__'): + return frozenset(__tp.__protocol_attrs__) + return frozenset(_get_protocol_attrs(__tp)) + + +# Aliases for items that have always been in typing. 
+# Explicitly assign these (rather than using `from typing import *` at the top), +# so that we get a CI error if one of these is deleted from typing.py +# in a future version of Python +AbstractSet = typing.AbstractSet +AnyStr = typing.AnyStr +BinaryIO = typing.BinaryIO +Callable = typing.Callable +Collection = typing.Collection +Container = typing.Container +Dict = typing.Dict +ForwardRef = typing.ForwardRef +FrozenSet = typing.FrozenSet +Generator = typing.Generator +Generic = typing.Generic +Hashable = typing.Hashable +IO = typing.IO +ItemsView = typing.ItemsView +Iterable = typing.Iterable +Iterator = typing.Iterator +KeysView = typing.KeysView +List = typing.List +Mapping = typing.Mapping +MappingView = typing.MappingView +Match = typing.Match +MutableMapping = typing.MutableMapping +MutableSequence = typing.MutableSequence +MutableSet = typing.MutableSet +Optional = typing.Optional +Pattern = typing.Pattern +Reversible = typing.Reversible +Sequence = typing.Sequence +Set = typing.Set +Sized = typing.Sized +TextIO = typing.TextIO +Tuple = typing.Tuple +Union = typing.Union +ValuesView = typing.ValuesView +cast = typing.cast +no_type_check = typing.no_type_check +no_type_check_decorator = typing.no_type_check_decorator diff --git a/mysteriendrama/__pycache__/settings.cpython-311.pyc b/mysteriendrama/__pycache__/settings.cpython-311.pyc index e1ae18e..078d6b4 100644 Binary files a/mysteriendrama/__pycache__/settings.cpython-311.pyc and b/mysteriendrama/__pycache__/settings.cpython-311.pyc differ diff --git a/templates/anmeldung.html b/templates/anmeldung.html index ae0b810..c044a68 100644 --- a/templates/anmeldung.html +++ b/templates/anmeldung.html @@ -6,7 +6,7 @@

Anmeldung

-
+

Essensanmeldung

{% csrf_token %} @@ -59,7 +59,7 @@

Hinweise

-
    +
    1. Bei überschreiten der maximalen Sitzanzahl von 7 pro Veranstaltung wird Ihre gesamte Reservierung ungültig gemacht.
    2. Gerne können Sie den Rechnungsbetrag für Ihr Ticket auch überweisen. Die Kontodaten finden Sie unter Unterstützen. Bitte geben Sie Ihren Namen und einen Hinweis auf die Reservierung im Verwendungszweck mit an.
    3. Für Reservierungen werden die Hälfte aller Plätze bereitgestellt. Weitere Plätze sind, solange der Vorrat reicht, an der Kasse verfügbar.
    4. diff --git a/templates/datenschutz.html b/templates/datenschutz.html index 4362629..85db537 100644 --- a/templates/datenschutz.html +++ b/templates/datenschutz.html @@ -1,58 +1,9 @@ {% extends 'base.html' %} {% block title %}Datenschutz{% endblock title %} {% block content %} -
      -

      Datenschutzhinweise der Christengemeinschaft

      -

      Für unsere Mitglieder, Nutzer der Internetseite und sonstige Betroffene

      -

      I. Allgemeine Hinweise und Zuständigkeiten

      -

      1. Die Datenspeicherung und -verarbeitung innerhalb der Christengemeinschaft dient nur der Erfüllung ihres kirchlichen Auftrags. Jedermann darf darauf vertrauen, dass die Christengemeinschaft Daten nur für eigene Zwecke erhebt und nicht ohne Zustimmung der Betroffenen oder gesetzliche Verpflichtung an Dritte weitergibt. Innerhalb der Christengemeinschaft ist der Zugang zu persönlichen Daten auf die Personen beschränkt, die diese Daten zur Erfüllung ihrer Aufgaben benötigen.
      Aufzeichnungen, die in Wahrnehmung eines Seelsorgeauftrages erstellt werden, sind Dritten nicht zugänglich. Die besonderen Bestimmungen über den Schutz des Beicht- und Seelsorgegeheimnisses bleiben gewahrt.
      Die staatlichen Gesetze über den Datenschutz sind innerhalb der Christengemeinschaft nicht unmittelbar anzuwenden. Die Christengemeinschaft hat vielmehr entsprechend Artikel 91 der Europäischen Datenschutz-Grundverordnung (DSGVO) die eigene Datenschutzordnung entsprechend den rechtlichen Vorgaben angepasst.
      Die aktuelle Datenschutzordnung der Christengemeinschaft (DSO) kann hier eingesehen werden. Über die demnach geltenden Bestimmungen und Grundsätze informieren wir nachfolgend.

      -

      2. Die Datenschutzhinweise gelten für die Datenverarbeitung durch die Christengemeinschaft in Deutschland – Körperschaftsverband KdöR, Pfeifferstr. 4, 34121 Kassel, verantwortlich für die Datenverarbeitung: Stefan Illemann, 0561 8 10 46 34, kv.deutschland@christengemeinschaft.org, sowie alle angeschlossenen Regionalkörperschaften und sonstigen rechtlich selbständigen Einrichtungen der Christengemeinschaft , die sie mit den jeweiligen Kontaktdaten und Verantwortlichen hier finden.
      Die Hinweise gelten außerdem für alle den Regionalkörperschaften angeschlossenen Gemeinden, die sie hier finden.

      -

      3. Datenschutzbeauftragte für den gesamten zuvor genannten Bereich der Christengemeinschaft sind:

      -
        -
      • Dorothea Humérez (Koordinatorin der Region Bayern), Telefon: 0931 – 7 20 88 52
      • -
      -

      und als Stellvertreter

      -
        -
      • Thomas Nayda (Koordinator der Region Norddeutschland), Telefon: 040 – 44 40 54 22
      • -
      -

      Postanschrift:
      Die Christengemeinschaft in Deutschland, Körperschaftsverband KdöR
      Pfeifferstraße 4, 34121 Kassel
      E-Mail: datenschutz@christengemeinschaft.org

      -

      4. Über die Einhaltung der Datenschutzordnung in der Christengemeinschaft wacht eine unabhängige Aufsichtsstelle der Christengemeinschaft für den Datenschutz, die geleitet wird von

      - -

      II. Datenschutzhinweise für die Mitglieder der Christengemeinschaft und sonstige Betroffene.

      -

      1. Personenbezogene Daten werden in der Christengemeinschaft nach den Grundsätzen verarbeitet, die in § 4 DSO definiert sind: Rechtmäßigkeit, Zweckbindung, Datenminimierung, Richtigkeit, Speicherbegren-zung, Integrität und Vertraulichkeit.
      Jedermann hat das Recht, sich darüber zu informieren, welche Daten innerhalb der Christengemeinschaft über ihn gespeichert werden, er kann dem widersprechen und besitzt weitere in der DSO aufgeführte Be-troffenenrechte.

      -

      2. Wenn Sie Mitglied der Christengemeinschaft sind oder werden, Informationen anfordern oder als Liefe-rant oder Dienstleister für die Christengemeinschaft tätig werden wollen, waren oder sind, erheben wir folgende Informationen:

      -
        -
      • Anrede, Vorname, Nachname,
      • -
      • E-Mail-Adresse,
      • -
      • Anschrift,
      • -
      • Telefonnummer (Festnetz und/oder Mobilfunk),
      • -
      • Ggf. Geburtsdatum,
      • -
      • Kontoverbindung,
      • -
      • Informationen über ihre Beziehung zur Christengemeinschaft und im Falle der Mitgliedschaft der Familienverhältnisse und empfangene Sakramente.
      • -
      -

      Die Erhebung dieser Daten erfolgt,

      -
        -
      • um Sie als Mitglied, Interessent, Dienstleister oder Lieferant identifizieren zu können;
      • -
      • zur Korrespondenz mit Ihnen;
      • -
      • zur Rechnungsstellung, bzw. Beitragserhebung;
      • -
      • Im Falle der Mitgliedschaft zur Führung der Kirchenbücher.
      • -
      -

      3. Die Datenverarbeitung erfolgt auf Basis von §§ 6,9 DSO und ist zur angemessenen Bearbeitung des Mitgliedschafts-, Vertrags- oder Interessentenverhältnisses erforderlich.
      Die von uns erhobenen personenbezogenen Daten werden spätestens nach 10 Jahren gelöscht, es sei denn ihre Kenntnis ist nur für eine kürzere oder längere Zeit entsprechend § 13 DSO erforderlich.

      -

      4. Eine Offenlegung ihrer Daten innerhalb der Christengemeinschaft ist im Rahmen des § 7 DSO zulässig, sofern dies zur Erfüllung der Aufgaben der Christengemeinschaft erforderlich ist.
      Eine Übermittlung Ihrer persönlichen Daten an Dritte erfolgt durch die Christengemeinschaft nur auf Basis von gesetzlichen Verpflichtungen z.B. an die Steuerverwaltung oder im Rahmen der Auftragsverarbeitung z.B. durch einen Steuerberater.

      -

      III. Betroffenenrechte für die Mitglieder der Christengemeinschaft und sonstige Betroffene.

      -
        -
      • Sie haben das Recht:
        gemäß § 8 DSO eine einmal erteilte Einwilligung jederzeit gegenüber uns (Christengemeinschaft in Deutschland – Körperschaftsverband KdöR, Pfeifferstr. 4, 34121 Kassel) zu widerrufen. Dies hat zur Folge, dass wir die Datenverarbeitung, die auf dieser Einwilligung beruhte, für die Zukunft nicht mehr fortführen dürfen;
      • -
      • gemäß § 11 DSO Auskunft über Ihre von uns verarbeiteten personenbezogenen Daten zu verlangen. Insbesondere können Sie Auskunft über die Verarbeitungszwecke, die Kategorie der personenbezogenen Daten, die Kategorien von Empfängern, gegenüber denen Ihre Daten offengelegt wurden oder werden, die geplante Speicherdauer, das Bestehen eines Rechts auf Berichtigung, Löschung, Einschränkung der Verarbeitung oder Widerspruch, das Bestehen eines Beschwerderechts verlangen;
      • -
      • gemäß § 12 DSO unverzüglich die Berichtigung unrichtiger oder Vervollständigung Ihrer bei uns gespeicherten personenbezogenen Daten zu verlangen;
      • -
      • gemäß § 13 DSO die Löschung Ihrer bei uns gespeicherten personenbezogenen Daten zu verlangen, soweit nicht die Verarbeitung zur Ausübung des Rechts auf freie Meinungsäußerung und Information, zur Erfüllung einer rechtlichen Verpflichtung, aus Gründen des öffentlichen Interesses oder zur Geltendmachung, Ausübung oder Verteidigung von Rechtsansprüchen erforderlich ist;
      • -
      • gemäß § 14 DSO die Einschränkung der Verarbeitung Ihrer personenbezogenen Daten zu verlangen, soweit die Richtigkeit der Daten von Ihnen bestritten wird;
      • -
      • gemäß § 15 DSO Ihre personenbezogenen Daten, die Sie uns bereitgestellt haben, in einem strukturierten, gängigen und maschinenlesbaren Format zu erhalten oder die Übermittlung an einen anderen Verantwortlichen zu verlangen;
      • -
      • gemäß § 16 DSO Widerspruch gegen die Verarbeitung Ihrer personenbezogenen Daten unter folgender Anschrift zu erheben:
        Christengemeinschaft in Deutschland – Körperschaftsverband KdöR, Pfeifferstr. 4, 34121 Kassel;
        es genügt eine E-Mail an: kv.deutschland@christengemeinschaft.org;
      • -
      • gemäß § 31 DSO sich bei der unabhängigen Aufsichtsstelle der Christengemeinschaft zu beschweren. Dietmar Schwarz, Christengemeinschaft, Mittelweg 13, 20148 Hamburg, Tel: 040 – 41 33 02 72 / E-Mail: d.schwarz@cg-sozialwerke.de
      • -
      -

      IV. Datenschutzhinweise für Besucher dieser Website

      -

      1. Durch den Besuch unserer Website können Informationen über Ihren Zugriff (insbesondere Datum, Uhrzeit, aufgerufene Seite, genutzter Webbrowser, IP-Adresse) auf dem Server gespeichert werden. Diese Daten können nicht bestimmten Personen zugeordnet werden. Die Speicherung dient technischen und statistischen Zwecken. Unsere Website verwendet außerdem Cookies. Ein Cookie ist eine Textdatei, die auf Ihrer Festplatte zwischengespeichert wird. Wird der Server unserer Website erneut aufgerufen, sendet der Webbrowser den zuvor empfangenen Cookie wieder zurück an den Server. Durch Cookies kann insbesondere das Navigieren auf einer Internetseite erleichtert werden. Wenn Sie die Speicherung von Cookies unterbinden möchten, können Sie dies durch die Anpassung der Einstellungen Ihres Webbrowsers erreichen. Allerdings wird hierdurch ggf. die Funktionalität der Website eingeschränkt.

      +
      +

      Datenschutzhinweise

      +
      Wir verarbeiten Ihre Daten nur zum Zwecke der Bereitstellung auf der Website angebotener Dienstleistungen. +
      {% endblock content %} diff --git a/templates/drama.html b/templates/drama.html index 0ed7c1c..2774cfc 100644 --- a/templates/drama.html +++ b/templates/drama.html @@ -10,6 +10,6 @@
      -

      Rudolf Steiner begann die Mysteriendramen zu schreiben mit dem Ziel, insgesamt die Textbücher für 12 Dramen fertigzustellen. Heute darf unsere Gesellschaft sich glücklich schätzen, die ersten vier Dramen zu kennen. Die Dramen sind Beispiele für die Verwandlung der Menschen auf dem Weg der inneren Schulung. Sie berichten von der geistigen und seelischen Entwicklung der Menschen.
      Im ersten Drama, der „Pforte der Einweihung“, geht es um Johannes Thomasius, welcher sich, aufgrund einer Erkenntnis, die er nach einem Vortragsbesuch hatte, in einer tiefen Lebenskrise befindet. Er versucht nun verzweifelt, seinen innersten Wesenskern zu finden, doch je genauer und kritischer er sich selbst betrachtet, desto mehr verschwindet sein Ich aus seinem Selbstbild. Außerdem empfindet er, ohne sich davon distanzieren zu können, alles, was seine Mitmenschen berichten oder erleiden, nach. Er widersteht jedoch den Versuchungen, sich in eine andere, der Realität fernen, Welt zu träumen und wird so schließlich von seinem Lehrer für bereit erklärt, sich auf den Entwicklungsweg zu begeben.
      Im Laufe des Stückes geht er dann durch eine ganz bestimmte Verwandlung, beeinflusst von guten und bösen Wesen und Geistern der Natur. Dabei macht nicht nur Johannes Thomasius eine Entwicklung durch, sondern auch alle, welche sich eng mit ihm verbunden hatten.
      Siehe hier für weitere Informationen: Die Mysteriendramen Rudolf Steiners | Anthrowiki

      +

      Rudolf Steiner begann die Mysteriendramen zu schreiben mit dem Ziel, insgesamt die Textbücher für 12 Dramen fertigzustellen. Heute darf unsere Gesellschaft sich glücklich schätzen, die ersten vier Dramen zu kennen. Die Dramen sind Beispiele für die Verwandlung der Menschen auf dem Weg der inneren Schulung. Sie berichten von der geistigen und seelischen Entwicklung der Menschen.
      Im ersten Drama, der „Pforte der Einweihung“, geht es um Johannes Thomasius, welcher sich, aufgrund einer Erkenntnis, die er nach einem Vortragsbesuch hatte, in einer tiefen Lebenskrise befindet. Er versucht nun verzweifelt, seinen innersten Wesenskern zu finden, doch je genauer und kritischer er sich selbst betrachtet, desto mehr verschwindet sein Ich aus seinem Selbstbild. Außerdem empfindet er, ohne sich davon distanzieren zu können, alles, was seine Mitmenschen berichten oder erleiden, nach. Er widersteht jedoch den Versuchungen, sich in eine andere, der Realität fernen, Welt zu träumen und wird so schließlich von seinem Lehrer für bereit erklärt, sich auf den Entwicklungsweg zu begeben.
      Im Laufe des Stückes geht er dann durch eine ganz bestimmte Verwandlung, beeinflusst von guten und bösen Wesen und Geistern der Natur. Dabei macht nicht nur Johannes Thomasius eine Entwicklung durch, sondern auch alle, welche sich eng mit ihm verbunden hatten.

      {% endblock content %} diff --git a/templates/footer.html b/templates/footer.html index 1f21dac..0f46b46 100644 --- a/templates/footer.html +++ b/templates/footer.html @@ -1,9 +1,6 @@
      -
      + -
      -
      ©2023 Die Christengemeinschaft in Bayern KdöR, Gemeinde Nürnberg
      -
      diff --git a/templates/impressum.html b/templates/impressum.html index db0cf60..60242f8 100644 --- a/templates/impressum.html +++ b/templates/impressum.html @@ -4,12 +4,12 @@

      Impressum

      Angaben gemäß § 5 TMG

      -

      Die Christengemeinschaft – Gemeinde Nürnberg (KdöR)

      -

      Krelingstr. 26 90408 Nürnberg Vertreten durch: Daniel Hafner (Pfarrer)

      +

      Daniel Hafner

      +

      Kaulbachstraße 11 90408 Nürnberg

      Kontakt:
      Telefon: +49-911-93207882
      Email: dhafner1964@hotmail.com

      -

      Verantwortlich für den Inhalt nach § 55 Abs. 2 RStV: Daniel Hafner (Pfarrer)

      +

      Verantwortlich für den Inhalt nach § 55 Abs. 2 RStV: Daniel Hafner

      Haftungsausschluss (Disclaimer)

      Haftung für Inhalte Als Diensteanbieter sind wir gemäß § 7 Abs.1 TMG für eigene Inhalte auf diesen Seiten nach den allgemeinen Gesetzen verantwortlich. Nach §§ 8 bis 10 TMG sind wir als Diensteanbieter jedoch nicht verpflichtet, übermittelte oder gespeicherte fremde Informationen zu überwachen oder nach Umständen zu forschen, die auf eine rechtswidrige Tätigkeit hinweisen. Verpflichtungen zur Entfernung oder Sperrung der Nutzung von Informationen nach den allgemeinen Gesetzen bleiben hiervon unberührt. Eine diesbezügliche Haftung ist jedoch erst ab dem Zeitpunkt der Kenntnis einer konkreten Rechtsverletzung möglich. Bei Bekanntwerden von entsprechenden Rechtsverletzungen werden wir diese Inhalte umgehend entfernen. Haftung für Links Unser Angebot enthält Links zu externen Webseiten Dritter, auf deren Inhalte wir keinen Einfluss haben. Deshalb können wir für diese fremden Inhalte auch keine Gewähr übernehmen. Für die Inhalte der verlinkten Seiten ist stets der jeweilige Anbieter oder Betreiber der Seiten verantwortlich. Die verlinkten Seiten wurden zum Zeitpunkt der Verlinkung auf mögliche Rechtsverstöße überprüft. Rechtswidrige Inhalte waren zum Zeitpunkt der Verlinkung nicht erkennbar. Eine permanente inhaltliche Kontrolle der verlinkten Seiten ist jedoch ohne konkrete Anhaltspunkte einer Rechtsverletzung nicht zumutbar. Bei Bekanntwerden von Rechtsverletzungen werden wir derartige Links umgehend entfernen. Urheberrecht Die durch die Seitenbetreiber erstellten Inhalte und Werke auf diesen Seiten unterliegen dem deutschen Urheberrecht. Die Vervielfältigung, Bearbeitung, Verbreitung und jede Art der Verwertung außerhalb der Grenzen des Urheberrechtes bedürfen der schriftlichen Zustimmung des jeweiligen Autors bzw. Erstellers. Downloads und Kopien dieser Seite sind nur für den privaten, nicht kommerziellen Gebrauch gestattet. Soweit die Inhalte auf dieser Seite nicht vom Betreiber erstellt wurden, werden die Urheberrechte Dritter beachtet. 
Insbesondere werden Inhalte Dritter als solche gekennzeichnet. Sollten Sie trotzdem auf eine Urheberrechtsverletzung aufmerksam werden, bitten wir um einen entsprechenden Hinweis. Bei Bekanntwerden von Rechtsverletzungen werden wir derartige Inhalte umgehend entfernen.

      diff --git a/templates/ueber.html b/templates/ueber.html index 68bcacd..cbb043f 100644 --- a/templates/ueber.html +++ b/templates/ueber.html @@ -3,8 +3,6 @@ {% block title %}Über uns{% endblock title %} {% block content %}
      -
      -
      diff --git a/website/__pycache__/views.cpython-311.pyc b/website/__pycache__/views.cpython-311.pyc index 6d6a6c1..1dadaec 100644 Binary files a/website/__pycache__/views.cpython-311.pyc and b/website/__pycache__/views.cpython-311.pyc differ diff --git a/website/views.py b/website/views.py index 060becb..a24b93e 100644 --- a/website/views.py +++ b/website/views.py @@ -74,12 +74,32 @@ def anmeldung(request): essen_form = EssenForm(request.POST) if essen_form.is_valid(): essen_form.save() - return render(request, 'anmeldung.html') + name = request.POST.get('name') + mail = request.POST.get('mail') + send_mail('Essensanmeldung erhalten!', '''Sehr geehrte/r ''' + name + ''', +hiermit möchten wir Ihnen den Erhalt Ihrer Essensanmeldung bestätigen. +Mit freundlichen Grüßen, +Ihr Team der Jugendgruppe Mysteriendrama''', 'webmailer@denkena-consulting.com', [mail, 'mysteriendrama@denkena-consulting.com', 'dhafner1964@hotmail.com',], fail_silently=False) + context = init_context() + return render(request, 'anmeldung.html', context) if request.method == 'POST' and request.POST.get("form_type") == "reservierung": reservierung_form = ReservierungForm(request.POST) if reservierung_form.is_valid(): reservierung_form.save() - return render(request, 'anmeldung.html') + name = request.POST.get('name') + mail = request.POST.get('mail') + anzahl_b = request.POST.get('anzahl_b') + anzahl_w = request.POST.get('anzahl_w') + anzahl_l = request.POST.get('anzahl_l') + message_body = '''Sehr geehrte/r ''' + name + ''',\nhiermit möchten wir Ihnen den Erhalt Ihrer Sitzplatzreservierung bestätigen.\n''' + if int(anzahl_b) > 0: message_body += ('Sitzplätze in Böblingen: ' + anzahl_b + '.\n') + if int(anzahl_w) > 0: message_body += ('Sitzplätze in Wuppertal: ' + anzahl_w + '.\n') + if int(anzahl_l) > 0: message_body += ('Sitzplätze in Lübeck: ' + anzahl_l + '.\n') + message_body += ('''Mit freundlichen Grüßen, +Ihr Team der Jugendgruppe Mysteriendrama''') + send_mail('Reservierung erhalten!', 
message_body, 'webmailer@denkena-consulting.com', [mail, 'mysteriendrama@denkena-consulting.com', 'dhafner1964@hotmail.com'], fail_silently=False) + context = init_context() + return render(request, 'anmeldung.html', context) if request.method == 'GET': context = init_context() return render(request, 'anmeldung.html', context)