Resubmit: added gpio_defaults.
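
This resubmission also moves the project Makefile to the mpw-8a flow: PDK selection
(sky130A / sky130B / gf180mcuC), a `setup` target that installs caravel,
mgmt_core_wrapper, OpenLane, the PDK, and the efabless timing scripts, dockerized
RTL/GL/GL+SDF verification targets per testbench, and parasitic-extraction plus
STA targets. A rough usage sketch of the new targets follows; <block> and <tb> are
placeholders for whatever directories exist under openlane/ and verilog/dv/, and
PDK_ROOT/OPENLANE_ROOT are assumed to point outside the project tree:

    export PDK_ROOT=$HOME/pdk OPENLANE_ROOT=$HOME/openlane
    make setup                      # clone caravel/mcw, install OpenLane, PDK, timing scripts
    make <block>                    # harden openlane/<block> with OpenLane
    make verify-<tb>-rtl            # RTL simulation of verilog/dv/<tb> in the efabless/dv image
    make verify-<tb>-gl             # gate-level simulation of the same testbench
    make extract-parasitics create-spef-mapping
    make caravel-sta                # STA at typ/fast/slow corners via timing-scripts
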
diff --git a/Makefile b/Makefile
index 6965c80..7a99ad4 100644
--- a/Makefile
+++ b/Makefile
@@ -13,18 +13,28 @@
 # limitations under the License.
 #
 # SPDX-License-Identifier: Apache-2.0
+MAKEFLAGS+=--warn-undefined-variables
 
-CARAVEL_ROOT?=$(PWD)/caravel
+export CARAVEL_ROOT?=$(PWD)/caravel
 PRECHECK_ROOT?=${HOME}/mpw_precheck
-SIM ?= RTL
+export MCW_ROOT?=$(PWD)/mgmt_core_wrapper
+SIM?=RTL
 
-export OPEN_PDKS_COMMIT?=41c0908b47130d5675ff8484255b43f66463a7d6
-export PDK=sky130B
-export PDKPATH?=$(PDK_ROOT)/$(PDK)
 # Install lite version of caravel, (1): caravel-lite, (0): caravel
 CARAVEL_LITE?=1
 
-MPW_TAG ?= mpw-7a
+# PDK switch variant
+export PDK?=sky130A
+#export PDK?=gf180mcuC
+export PDKPATH?=$(PDK_ROOT)/$(PDK)
+
+
+
+ifeq ($(PDK),sky130A)
+	SKYWATER_COMMIT=f70d8ca46961ff92719d8870a18a076370b85f6c
+	export OPEN_PDKS_COMMIT?=0059588eebfc704681dc2368bd1d33d96281d10f
+	export OPENLANE_TAG?=2022.11.19
+	MPW_TAG ?= mpw-8a
 
 ifeq ($(CARAVEL_LITE),1)
 	CARAVEL_NAME := caravel-lite
@@ -36,55 +46,157 @@
 	CARAVEL_TAG := $(MPW_TAG)
 endif
 
+endif
+
+ifeq ($(PDK),sky130B)
+	SKYWATER_COMMIT=f70d8ca46961ff92719d8870a18a076370b85f6c
+	export OPEN_PDKS_COMMIT?=0059588eebfc704681dc2368bd1d33d96281d10f
+	export OPENLANE_TAG?=2022.11.19
+	MPW_TAG ?= mpw-8a
+
+ifeq ($(CARAVEL_LITE),1)
+	CARAVEL_NAME := caravel-lite
+	CARAVEL_REPO := https://github.com/efabless/caravel-lite
+	CARAVEL_TAG := $(MPW_TAG)
+else
+	CARAVEL_NAME := caravel
+	CARAVEL_REPO := https://github.com/efabless/caravel
+	CARAVEL_TAG := $(MPW_TAG)
+endif
+
+endif
+
+ifeq ($(PDK),gf180mcuC)
+
+	MPW_TAG ?= gfmpw-0a
+	CARAVEL_NAME := caravel
+	CARAVEL_REPO := https://github.com/efabless/caravel-gf180mcu
+	CARAVEL_TAG := $(MPW_TAG)
+	#OPENLANE_TAG=ddfeab57e3e8769ea3d40dda12be0460e09bb6d9
+	export OPEN_PDKS_COMMIT?=0059588eebfc704681dc2368bd1d33d96281d10f
+	export OPENLANE_TAG?=2022.11.19
+
+endif
+
 # Include Caravel Makefile Targets
 .PHONY: % : check-caravel
-%: 
+%:
 	export CARAVEL_ROOT=$(CARAVEL_ROOT) && $(MAKE) -f $(CARAVEL_ROOT)/Makefile $@
 
-# Verify Target for running simulations
-.PHONY: verify
-verify:
-	cd ./verilog/dv/ && \
-	export SIM=${SIM} && \
-		$(MAKE) -j$(THREADS)
+.PHONY: install
+install:
+	if [ -d "$(CARAVEL_ROOT)" ]; then\
+		echo "Deleting existing $(CARAVEL_ROOT)" && \
+		rm -rf $(CARAVEL_ROOT) && sleep 2;\
+	fi
+	echo "Installing $(CARAVEL_NAME).."
+	git clone -b $(CARAVEL_TAG) $(CARAVEL_REPO) $(CARAVEL_ROOT) --depth=1
 
 # Install DV setup
 .PHONY: simenv
 simenv:
-	docker pull efabless/dv_setup:latest
+	docker pull efabless/dv:latest
 
-PATTERNS=$(shell cd verilog/dv && find * -maxdepth 0 -type d)
-DV_PATTERNS = $(foreach dv, $(PATTERNS), verify-$(dv))
+.PHONY: setup
+setup: install check-env install_mcw openlane pdk-with-volare setup-timing-scripts
+
+# Openlane
+blocks=$(shell cd openlane && find * -maxdepth 0 -type d)
+.PHONY: $(blocks)
+$(blocks): % :
+	$(MAKE) -C openlane $*
+
+dv_patterns=$(shell cd verilog/dv && find * -maxdepth 0 -type d)
+dv-targets-rtl=$(dv_patterns:%=verify-%-rtl)
+dv-targets-gl=$(dv_patterns:%=verify-%-gl)
+dv-targets-gl-sdf=$(dv_patterns:%=verify-%-gl-sdf)
+
 TARGET_PATH=$(shell pwd)
-VERIFY_COMMAND="cd ${TARGET_PATH}/verilog/dv/$* && export SIM=${SIM} && make"
-$(DV_PATTERNS): verify-% : ./verilog/dv/% 
+verify_command="source ~/.bashrc && cd ${TARGET_PATH}/verilog/dv/$* && export SIM=${SIM} && make"
+dv_base_dependencies=simenv
+docker_run_verify=\
 	docker run -v ${TARGET_PATH}:${TARGET_PATH} -v ${PDK_ROOT}:${PDK_ROOT} \
-                -v ${CARAVEL_ROOT}:${CARAVEL_ROOT} \
-                -e TARGET_PATH=${TARGET_PATH} -e PDK_ROOT=${PDK_ROOT} \
-                -e CARAVEL_ROOT=${CARAVEL_ROOT} \
-                -u $(id -u $$USER):$(id -g $$USER) efabless/dv_setup:latest \
-                sh -c $(VERIFY_COMMAND)
-				
-# Openlane Makefile Targets
-BLOCKS = $(shell cd openlane && find * -maxdepth 0 -type d)
-.PHONY: $(BLOCKS)
-$(BLOCKS): %:
-	cd openlane && $(MAKE) $*
+		-v ${CARAVEL_ROOT}:${CARAVEL_ROOT} \
+		-e TARGET_PATH=${TARGET_PATH} -e PDK_ROOT=${PDK_ROOT} \
+		-e CARAVEL_ROOT=${CARAVEL_ROOT} \
+		-e TOOLS=/foss/tools/riscv-gnu-toolchain-rv32i/217e7f3debe424d61374d31e33a091a630535937 \
+		-e DESIGNS=$(TARGET_PATH) \
+		-e USER_PROJECT_VERILOG=$(TARGET_PATH)/verilog \
+		-e PDK=$(PDK) \
+		-e CORE_VERILOG_PATH=$(TARGET_PATH)/mgmt_core_wrapper/verilog \
+		-e CARAVEL_VERILOG_PATH=$(TARGET_PATH)/caravel/verilog \
+		-e MCW_ROOT=$(MCW_ROOT) \
+		-u $$(id -u $$USER):$$(id -g $$USER) efabless/dv:latest \
+		sh -c $(verify_command)
 
-# Install caravel
-.PHONY: install
-install:
-	@echo "Installing $(CARAVEL_NAME).."
-	@git clone -b $(CARAVEL_TAG) $(CARAVEL_REPO) $(CARAVEL_ROOT)
+.PHONY: harden
+harden: $(blocks)
+
+.PHONY: verify
+verify: $(dv-targets-rtl)
+
+.PHONY: verify-all-rtl
+verify-all-rtl: $(dv-targets-rtl)
+
+.PHONY: verify-all-gl
+verify-all-gl: $(dv-targets-gl)
+
+.PHONY: verify-all-gl-sdf
+verify-all-gl-sdf: $(dv-targets-gl-sdf)
+
+$(dv-targets-rtl): SIM=RTL
+$(dv-targets-rtl): verify-%-rtl: $(dv_base_dependencies)
+	$(docker_run_verify)
+
+$(dv-targets-gl): SIM=GL
+$(dv-targets-gl): verify-%-gl: $(dv_base_dependencies)
+	$(docker_run_verify)
+
+$(dv-targets-gl-sdf): SIM=GL_SDF
+$(dv-targets-gl-sdf): verify-%-gl-sdf: $(dv_base_dependencies)
+	$(docker_run_verify)
+
+clean-targets=$(blocks:%=clean-%)
+.PHONY: $(clean-targets)
+$(clean-targets): clean-% :
+	rm -f ./verilog/gl/$*.v
+	rm -f ./spef/$*.spef
+	rm -f ./sdc/$*.sdc
+	rm -f ./sdf/$*.sdf
+	rm -f ./gds/$*.gds
+	rm -f ./mag/$*.mag
+	rm -f ./lef/$*.lef
+	rm -f ./maglef/$*.mag
+
+make_what=setup $(blocks) $(dv-targets-rtl) $(dv-targets-gl) $(dv-targets-gl-sdf) $(clean-targets)
+.PHONY: what
+what:
+	# $(make_what)
+
+# Install Openlane
+.PHONY: openlane
+openlane:
+	@if [ "$$(realpath $${OPENLANE_ROOT})" = "$$(realpath $$(pwd)/openlane)" ]; then\
+		echo "OPENLANE_ROOT is set to '$$(pwd)/openlane' which contains openlane config files"; \
+		echo "Please set it to a different directory"; \
+		exit 1; \
+	fi
+	cd openlane && $(MAKE) openlane
+
+#### Not sure if the following targets are of any use
 
 # Create symbolic links to caravel's main files
 .PHONY: simlink
 simlink: check-caravel
-### Symbolic links relative path to $CARAVEL_ROOT 
+### Symbolic links use paths relative to $CARAVEL_ROOT
 	$(eval MAKEFILE_PATH := $(shell realpath --relative-to=openlane $(CARAVEL_ROOT)/openlane/Makefile))
+	$(eval PIN_CFG_PATH  := $(shell realpath --relative-to=openlane/user_project_wrapper $(CARAVEL_ROOT)/openlane/user_project_wrapper_empty/pin_order.cfg))
 	mkdir -p openlane
+	mkdir -p openlane/user_project_wrapper
 	cd openlane &&\
 	ln -sf $(MAKEFILE_PATH) Makefile
+	cd openlane/user_project_wrapper &&\
+	ln -sf $(PIN_CFG_PATH) pin_order.cfg
 
 # Update Caravel
 .PHONY: update_caravel
@@ -93,13 +205,9 @@
 
 # Uninstall Caravel
 .PHONY: uninstall
-uninstall: 
+uninstall:
 	rm -rf $(CARAVEL_ROOT)
 
-# Install Openlane
-.PHONY: openlane
-openlane: 
-	cd openlane && $(MAKE) openlane
 
 # Install Pre-check
 # Default installs to the user home directory, override by "export PRECHECK_ROOT=<precheck-installation-path>"
@@ -122,7 +230,8 @@
 	-u $(shell id -u $(USER)):$(shell id -g $(USER)) \
 	efabless/mpw_precheck:latest bash -c "cd $(PRECHECK_ROOT) ; python3 mpw_precheck.py --input_directory $(INPUT_DIRECTORY) --pdk_path $(PDK_ROOT)/$(PDK)"
 
-# Clean 
+
+
 .PHONY: clean
 clean:
 	cd ./verilog/dv/ && \
@@ -148,5 +257,68 @@
 
 .PHONY: help
 help:
-	cd $(CARAVEL_ROOT) && $(MAKE) help 
+	cd $(CARAVEL_ROOT) && $(MAKE) help
 	@$(MAKE) -pRrq -f $(lastword $(MAKEFILE_LIST)) : 2>/dev/null | awk -v RS= -F: '/^# File/,/^# Finished Make data base/ {if ($$1 !~ "^[#.]") {print $$1}}' | sort | egrep -v -e '^[^[:alnum:]]' -e '^$@$$'
+
+
+export CUP_ROOT=$(shell pwd)
+export TIMING_ROOT?=$(shell pwd)/deps/timing-scripts
+export PROJECT_ROOT=$(CUP_ROOT)
+timing-scripts-repo=https://github.com/efabless/timing-scripts.git
+
+$(TIMING_ROOT):
+	@mkdir -p $(CUP_ROOT)/deps
+	@git clone $(timing-scripts-repo) $(TIMING_ROOT)
+
+.PHONY: setup-timing-scripts
+setup-timing-scripts: $(TIMING_ROOT)
+	@( cd $(TIMING_ROOT) && git pull )
+	@#( cd $(TIMING_ROOT) && git fetch && git checkout $(MPW_TAG); )
+	@python3 -m venv ./venv 
+		. ./venv/bin/activate && \
+		python3 -m pip install --upgrade pip && \
+		python3 -m pip install -r $(TIMING_ROOT)/requirements.txt && \
+		deactivate
+
+./verilog/gl/user_project_wrapper.v:
+	$(error you don't have $@)
+
+./env/spef-mapping.tcl: 
+	@echo "run the following:"
+	@echo "make extract-parasitics"
+	@echo "make create-spef-mapping"
+	exit 1
+
+.PHONY: create-spef-mapping
+create-spef-mapping: ./verilog/gl/user_project_wrapper.v
+	@. ./venv/bin/activate && \
+		python3 $(TIMING_ROOT)/scripts/generate_spef_mapping.py \
+			-i ./verilog/gl/user_project_wrapper.v \
+			-o ./env/spef-mapping.tcl \
+			--pdk-path $(PDK_ROOT)/$(PDK) \
+			--macro-parent mprj \
+			--project-root "$(CUP_ROOT)" && \
+		deactivate
+
+.PHONY: extract-parasitics
+extract-parasitics: ./verilog/gl/user_project_wrapper.v
+	@. ./venv/bin/activate && \
+		python3 $(TIMING_ROOT)/scripts/get_macros.py \
+		-i ./verilog/gl/user_project_wrapper.v \
+		-o ./tmp-macros-list \
+		--project-root "$(CUP_ROOT)" \
+		--pdk-path $(PDK_ROOT)/$(PDK) && \
+		deactivate
+		@cat ./tmp-macros-list | cut -d " " -f2 \
+			| xargs -I % bash -c "$(MAKE) -C $(TIMING_ROOT) \
+				-f $(TIMING_ROOT)/timing.mk rcx-% || echo 'Cannot extract %. Probably no def for this macro'"
+	@$(MAKE) -C $(TIMING_ROOT) -f $(TIMING_ROOT)/timing.mk rcx-user_project_wrapper
+	@cat ./tmp-macros-list
+	@rm ./tmp-macros-list
+	
+.PHONY: caravel-sta
+caravel-sta: ./env/spef-mapping.tcl
+	@$(MAKE) -C $(TIMING_ROOT) -f $(TIMING_ROOT)/timing.mk caravel-timing-typ
+	@$(MAKE) -C $(TIMING_ROOT) -f $(TIMING_ROOT)/timing.mk caravel-timing-fast
+	@$(MAKE) -C $(TIMING_ROOT) -f $(TIMING_ROOT)/timing.mk caravel-timing-slow
+	@echo "You can find results for all corners in $(CUP_ROOT)/signoff/caravel/openlane-signoff/timing/"
diff --git a/caravel b/caravel
index de98d51..d2b96d7 160000
--- a/caravel
+++ b/caravel
@@ -1 +1 @@
-Subproject commit de98d514aa6c642ef020876a64c4cdb2c9ea9a8a
+Subproject commit d2b96d7206208c9dcd721c44798bf2e5da8f8ad1
diff --git a/deps/timing-scripts b/deps/timing-scripts
new file mode 160000
index 0000000..5aae69b
--- /dev/null
+++ b/deps/timing-scripts
@@ -0,0 +1 @@
+Subproject commit 5aae69b4cfa571094bb10cd973dd91e1665cc581
diff --git a/mag/user_analog_project_wrapper.mag b/mag/user_analog_project_wrapper.mag
index d723827..a2d627f 100644
--- a/mag/user_analog_project_wrapper.mag
+++ b/mag/user_analog_project_wrapper.mag
@@ -1,7 +1,7 @@
 magic
 tech sky130B
 magscale 1 2
-timestamp 1662330820
+timestamp 1669077252
 << locali >>
 rect 53780 204191 53850 204462
 rect 513448 44321 513850 44391
@@ -1256,12 +1256,14 @@
 rect 153002 -800 153114 480
 rect 154184 -800 154296 480
 rect 155366 -800 155478 10514
-rect 156548 -800 156660 480
-rect 157730 -800 157842 29354
+rect 157730 7461 157842 29354
 rect 161276 28706 161388 28715
-rect 158912 -800 159024 480
-rect 160094 -800 160206 480
-rect 161276 -800 161388 28594
+rect 157730 7349 159024 7461
+rect 157730 6913 157842 7349
+rect 156548 -800 156660 480
+rect 157730 -800 157842 833
+rect 158912 -800 159024 7349
+rect 161276 7316 161388 28594
 rect 181895 28280 181955 79722
 rect 182235 79688 182295 79690
 rect 182228 79632 182237 79688
@@ -1272,24 +1274,28 @@
 rect 182090 78279 182160 78285
 rect 164830 28256 181955 28280
 rect 164822 28220 181955 28256
-rect 162458 -800 162570 480
-rect 163640 -800 163752 480
-rect 164822 -800 164934 28220
+rect 164822 7377 164934 28220
 rect 168368 27630 168480 27816
 rect 182235 27630 182295 79632
 rect 168368 27570 182295 27630
-rect 166004 -800 166116 480
-rect 167186 -800 167298 480
-rect 168368 -800 168480 27570
+rect 161264 7204 162570 7316
+rect 161276 6913 161388 7204
+rect 160094 -800 160206 480
+rect 161276 -800 161388 833
+rect 162458 -800 162570 7204
+rect 164822 7265 166116 7377
+rect 164822 6913 164934 7265
+rect 163640 -800 163752 480
+rect 164822 -800 164934 833
+rect 166004 -800 166116 7265
+rect 168368 7235 168480 27570
 rect 171914 27080 172026 27096
 rect 182577 27080 182633 27087
 rect 171860 27078 182635 27080
 rect 171860 27022 182577 27078
 rect 182633 27022 182635 27078
 rect 171860 27020 182635 27022
-rect 169550 -800 169662 480
-rect 170732 -800 170844 480
-rect 171914 -800 172026 27020
+rect 171914 7267 172026 27020
 rect 182577 27013 182633 27020
 rect 175460 26320 175572 26336
 rect 182915 26320 182975 79922
@@ -1302,9 +1308,7 @@
 rect 183160 78175 183166 78245
 rect 183090 78166 183160 78175
 rect 175410 26260 182975 26320
-rect 173096 -800 173208 480
-rect 174278 -800 174390 480
-rect 175460 -800 175572 26260
+rect 175460 7537 175572 26260
 rect 179006 25400 179118 25416
 rect 183255 25400 183315 79512
 rect 183869 79487 183878 79543
@@ -1317,21 +1321,42 @@
 rect 183867 74357 183876 74417
 rect 183936 74357 183945 74417
 rect 178830 25340 183315 25400
-rect 176642 -800 176754 480
-rect 177824 -800 177936 480
-rect 179006 -800 179118 25340
+rect 175460 7425 176754 7537
+rect 168359 7123 169662 7235
+rect 168368 6913 168480 7123
+rect 167186 -800 167298 480
+rect 168368 -800 168480 833
+rect 169550 -800 169662 7123
+rect 171914 7155 173208 7267
+rect 171914 6913 172026 7155
+rect 170732 -800 170844 480
+rect 171914 -800 172026 833
+rect 173096 -800 173208 7155
+rect 175460 6913 175572 7425
+rect 174278 -800 174390 480
+rect 175460 -800 175572 833
+rect 176642 -800 176754 7425
+rect 179006 7317 179118 25340
 rect 182552 23676 182664 23685
-rect 180188 -800 180300 480
+rect 179006 7205 180300 7317
+rect 179006 6913 179118 7205
+rect 177824 -800 177936 480
+rect 179006 -800 179118 833
+rect 180188 -800 180300 7205
+rect 182552 7116 182664 23564
+rect 182552 7004 183846 7116
+rect 182552 6913 182664 7004
 rect 181370 -800 181482 480
-rect 182552 -800 182664 23564
-rect 184215 1496 184275 80092
+rect 182552 -800 182664 833
+rect 183734 -800 183846 7004
+rect 184215 5258 184275 80092
 rect 185915 79978 185975 79980
 rect 185908 79922 185917 79978
 rect 185973 79922 185982 79978
 rect 184895 79778 184955 79780
 rect 184888 79722 184897 79778
 rect 184953 79722 184962 79778
-rect 184895 3170 184955 79722
+rect 184895 6046 184955 79722
 rect 185235 79688 185295 79690
 rect 185228 79632 185237 79688
 rect 185293 79632 185302 79688
@@ -1339,8 +1364,8 @@
 rect 185084 78095 185090 78165
 rect 185160 78095 185166 78165
 rect 185090 78086 185160 78095
-rect 185235 3690 185295 79632
-rect 185915 4900 185975 79922
+rect 185235 6357 185295 79632
+rect 185915 6785 185975 79922
 rect 186255 79568 186315 79570
 rect 186248 79512 186257 79568
 rect 186313 79512 186322 79568
@@ -1348,7 +1373,7 @@
 rect 186084 78475 186090 78545
 rect 186160 78475 186166 78545
 rect 186090 78466 186160 78475
-rect 186255 5760 186315 79512
+rect 186255 7312 186315 79512
 rect 186876 63763 186936 83956
 rect 187215 80148 187275 80150
 rect 187208 80092 187217 80148
@@ -1358,14 +1383,14 @@
 rect 187160 78585 187166 78655
 rect 187090 78576 187160 78585
 rect 186876 63694 186936 63703
-rect 187215 7050 187275 80092
+rect 187215 7847 187275 80092
 rect 188915 79978 188975 79980
 rect 188908 79922 188917 79978
 rect 188973 79922 188982 79978
 rect 187895 79778 187955 79780
 rect 187888 79722 187897 79778
 rect 187953 79722 187962 79778
-rect 187895 7790 187955 79722
+rect 187895 8347 187955 79722
 rect 188235 79688 188295 79690
 rect 188228 79632 188237 79688
 rect 188293 79632 188302 79688
@@ -1373,8 +1398,8 @@
 rect 188084 78255 188090 78325
 rect 188160 78255 188166 78325
 rect 188090 78246 188160 78255
-rect 188235 8640 188295 79632
-rect 188915 9140 188975 79922
+rect 188235 8530 188295 79632
+rect 188915 9091 188975 79922
 rect 189876 79895 189936 79897
 rect 189869 79839 189878 79895
 rect 189934 79839 189943 79895
@@ -1385,7 +1410,7 @@
 rect 189084 78425 189090 78495
 rect 189160 78425 189166 78495
 rect 189090 78416 189160 78425
-rect 189255 9410 189315 79512
+rect 189255 9386 189315 79512
 rect 189876 64469 189936 79839
 rect 190090 78435 190160 78444
 rect 190084 78365 190090 78435
@@ -1874,103 +1899,78 @@
 rect 488669 17783 488969 17792
 rect 464628 17510 465028 17519
 rect 492236 16324 492348 16333
-rect 239279 9694 239288 9806
-rect 239400 9694 239409 9806
-rect 235742 9410 235854 9486
-rect 189255 9350 236220 9410
-rect 188915 9080 233210 9140
-rect 188577 8940 188633 8947
-rect 228650 8940 228762 9030
-rect 188575 8938 228762 8940
-rect 188575 8882 188577 8938
-rect 188633 8882 228762 8938
-rect 188575 8880 228762 8882
-rect 188577 8873 188633 8880
-rect 225104 8640 225216 8696
-rect 188235 8580 226120 8640
-rect 221558 7790 221670 7796
-rect 187895 7730 222040 7790
-rect 218012 7286 218124 7295
-rect 218003 7184 218012 7286
-rect 218124 7184 218133 7286
-rect 214466 7050 214578 7086
-rect 187215 6990 215300 7050
-rect 210920 6358 211032 6486
-rect 210920 6302 210962 6358
-rect 211018 6302 211032 6358
-rect 207374 5760 207486 5776
-rect 186255 5700 208070 5760
-rect 203194 4900 203940 4916
-rect 185915 4840 203940 4900
-rect 203194 4804 203940 4840
-rect 185577 4450 185633 4457
-rect 185575 4448 200510 4450
-rect 185575 4392 185577 4448
-rect 185633 4392 200510 4448
-rect 185575 4390 200510 4392
-rect 185577 4383 185633 4390
-rect 196736 3690 196848 3756
-rect 185235 3630 196848 3690
-rect 193190 3170 193302 3186
-rect 184895 3110 193302 3170
-rect 184557 2500 184613 2507
-rect 189644 2500 189756 2516
-rect 184555 2498 189756 2500
-rect 184555 2442 184557 2498
-rect 184613 2442 189756 2498
-rect 184555 2440 189756 2442
-rect 184557 2433 184613 2440
-rect 184215 1384 186210 1496
-rect 184215 1280 184275 1384
-rect 183734 -800 183846 480
+rect 236924 9386 237036 9553
+rect 189255 9326 237036 9386
+rect 188915 9031 234706 9091
+rect 229832 8753 229944 8762
+rect 226286 8530 226398 8545
+rect 188235 8470 227034 8530
+rect 187895 8287 223339 8347
+rect 187215 7787 216182 7847
+rect 208556 7312 208668 7314
+rect 186255 7252 208676 7312
+rect 186255 6913 186315 7252
+rect 203458 6837 205122 6838
+rect 191510 6785 205122 6837
+rect 185915 6726 205122 6785
+rect 185915 6725 203794 6726
+rect 185235 6297 198141 6357
+rect 184895 5986 194496 6046
+rect 190826 5450 190938 5727
+rect 184213 5146 187392 5258
 rect 184916 -800 185028 480
-rect 186098 -800 186210 1384
-rect 187280 -800 187392 480
+rect 186098 -800 186210 833
+rect 187280 -800 187392 5146
 rect 188462 -800 188574 480
-rect 189644 -800 189756 2440
-rect 190826 -800 190938 480
+rect 189644 -800 189756 833
+rect 190826 -800 190938 5338
+rect 194372 5537 194496 5986
 rect 192008 -800 192120 480
-rect 193190 -800 193302 3110
-rect 194372 -800 194484 480
+rect 193190 -800 193302 833
+rect 194372 -800 194484 5537
 rect 195554 -800 195666 480
-rect 196736 -800 196848 3630
-rect 197918 -800 198030 480
+rect 196736 -800 196848 833
+rect 197918 -800 198030 6297
+rect 201464 3114 201576 3123
 rect 199100 -800 199212 480
-rect 200282 -800 200394 4390
-rect 201464 -800 201576 480
+rect 200282 -800 200394 833
+rect 201464 -800 201576 3002
 rect 202646 -800 202758 480
-rect 203828 -800 203940 4804
-rect 205010 -800 205122 480
+rect 203828 -800 203940 833
+rect 205010 -800 205122 6726
 rect 206192 -800 206304 480
-rect 207374 -800 207486 5700
-rect 208556 -800 208668 480
+rect 207374 -800 207486 833
+rect 208556 -800 208668 7252
+rect 212102 7091 212214 7100
 rect 209738 -800 209850 480
-rect 210920 -800 211032 6302
-rect 212102 -800 212214 480
+rect 210920 -800 211032 833
+rect 212102 -800 212214 6979
 rect 213284 -800 213396 480
-rect 214466 -800 214578 6990
-rect 215648 -800 215760 480
+rect 214466 -800 214578 833
+rect 215648 -800 215760 7787
+rect 219194 7689 219306 7698
 rect 216830 -800 216942 480
-rect 218012 -800 218124 7174
-rect 219194 -800 219306 480
+rect 218012 -800 218124 833
+rect 219194 -800 219306 7577
 rect 220376 -800 220488 480
-rect 221558 -800 221670 7730
-rect 222740 -800 222852 480
+rect 221558 -800 221670 833
+rect 222740 -800 222852 8287
 rect 223922 -800 224034 480
-rect 225104 -800 225216 8580
-rect 226286 -800 226398 480
+rect 225104 -800 225216 833
+rect 226286 -800 226398 8470
 rect 227468 -800 227580 480
-rect 228650 -800 228762 8880
-rect 229832 -800 229944 480
+rect 228650 -800 228762 833
+rect 229832 -800 229944 8641
 rect 231014 -800 231126 480
-rect 232196 -800 232308 9080
-rect 233378 -800 233490 480
+rect 232196 -800 232308 833
+rect 233378 -800 233490 9031
 rect 234560 -800 234672 480
-rect 235742 -800 235854 9350
-rect 236924 -800 237036 480
+rect 235742 -800 235854 833
+rect 236924 -800 237036 9326
+rect 240470 9421 240582 9430
 rect 238106 -800 238218 480
-rect 239288 -800 239400 9694
-rect 240470 -800 240582 480
+rect 239288 -800 239400 833
+rect 240470 -800 240582 9309
 rect 241652 -800 241764 480
 rect 242834 -800 242946 480
 rect 244016 -800 244128 480
@@ -2566,12 +2566,12 @@
 rect 464628 17519 465028 17919
 rect 488669 17792 488969 18092
 rect 492236 16212 492348 16324
-rect 239288 9694 239400 9806
-rect 188577 8882 188633 8938
-rect 218012 7174 218124 7286
-rect 210962 6302 211018 6358
-rect 185577 4392 185633 4448
-rect 184557 2442 184613 2498
+rect 229832 8641 229944 8753
+rect 190826 5338 190938 5450
+rect 201464 3002 201576 3114
+rect 212102 6979 212214 7091
+rect 219194 7577 219306 7689
+rect 240470 9309 240582 9421
 rect 499328 16198 499440 16310
 rect 495782 14556 495894 14668
 rect 551336 10464 551448 10576
@@ -2759,19 +2759,22 @@
 rect 569270 698980 569590 698981
 rect 573179 698400 575679 699728
 rect 36240 686560 42840 686891
-rect 1360 511642 35880 511660
-rect -800 511530 35880 511642
-rect 1360 511460 35880 511530
+rect -800 511530 1104 511642
+rect 2299 511460 35880 511660
+rect -800 510348 480 510460
+rect -800 509166 480 509278
+rect 4035 508096 4147 511460
 rect 35680 510910 35880 511460
 rect 46460 684430 423480 686930
 rect 426710 695940 575679 698400
-rect -800 510348 480 510460
-rect -800 509166 480 509278
-rect -800 507984 480 508096
+rect -800 507984 4147 508096
 rect -800 506802 480 506914
 rect -800 505620 480 505732
-rect -800 468308 4516 468420
-rect -800 467126 480 467238
+rect -800 468308 956 468420
+rect 1668 468308 4516 468420
+rect 1757 467238 1869 468308
+rect -800 467126 1869 467238
+rect 1757 466982 1869 467126
 rect -800 465944 480 466056
 rect -800 464762 480 464874
 rect -800 463580 480 463692
@@ -2791,13 +2794,15 @@
 rect 28971 425081 28981 425086
 rect 29093 425081 29099 425203
 rect 4404 383031 4516 383037
-rect -800 381971 7978 381976
-rect -800 381869 7871 381971
+rect -800 381864 812 381976
+rect 1316 381971 7978 381976
+rect 1316 381869 7871 381971
 rect 7973 381869 7978 381971
-rect -800 381864 7978 381869
+rect 1316 381864 7978 381869
 rect -800 380682 698 380794
 rect -800 379500 480 379612
-rect -800 378318 480 378430
+rect 1472 378430 1584 381864
+rect -800 378318 1584 378430
 rect -800 377136 480 377248
 rect -800 375954 480 376066
 rect 40347 339445 40427 339450
@@ -2811,11 +2816,14 @@
 rect -800 335096 480 335208
 rect -800 333914 480 334026
 rect -800 332732 480 332844
-rect -800 295420 36783 295532
+rect 2027 295532 2139 295575
+rect -800 295420 1223 295532
+rect 2027 295420 36783 295532
 rect 36895 295420 36901 295532
 rect -800 294238 480 294350
 rect -800 293056 480 293168
-rect -800 291874 480 291986
+rect 2027 291986 2139 295420
+rect -800 291874 2139 291986
 rect -800 290692 480 290804
 rect -800 289510 480 289622
 rect -800 252398 30075 252510
@@ -2902,44 +2910,58 @@
 rect 583520 588290 584800 588402
 rect 583520 587108 584800 587220
 rect 583520 585926 584800 586038
-rect 583520 584744 584800 584856
+rect 582510 584856 582622 585276
+rect 582510 584744 584800 584856
+rect 582510 583674 582622 584744
 rect 581963 583562 581969 583674
-rect 582081 583562 584800 583674
+rect 582081 583562 582682 583674
+rect 583075 583562 584800 583674
 rect 582340 550562 584800 555362
 rect 582340 540562 584800 545362
 rect 583520 500050 584800 500162
 rect 583520 498868 584800 498980
-rect 583520 497686 584800 497798
+rect 578572 497686 584800 497798
+rect 577416 494252 577526 494258
+rect 578572 494252 578684 497686
 rect 583520 496504 584800 496616
 rect 583520 495322 584800 495434
-rect 577416 494252 577526 494258
 rect 577410 494140 577416 494252
-rect 577528 494140 584800 494252
+rect 577528 494140 578875 494252
+rect 583121 494140 584800 494252
 rect 577416 494134 577526 494140
+rect 578572 494131 578684 494140
 rect 583520 455628 584800 455740
 rect 583520 454446 584800 454558
 rect 583520 453264 584800 453376
 rect 583520 452082 584800 452194
-rect 583520 450900 584800 451012
-rect 582377 449830 582489 449836
-rect 582489 449718 584800 449830
-rect 582377 449712 582489 449718
+rect 582362 450900 584800 451012
+rect 582362 449836 582474 450900
+rect 582362 449830 582489 449836
+rect 582362 449718 582377 449830
+rect 582489 449718 582769 449830
+rect 583323 449718 584800 449830
+rect 582362 449712 582489 449718
+rect 582362 449632 582474 449712
 rect 583520 411206 584800 411318
 rect 583520 410024 584800 410136
-rect 583520 408842 584800 408954
+rect 579264 408842 584800 408954
+rect 579219 405408 579386 405434
+rect 579900 405408 580012 408842
 rect 583520 407660 584800 407772
 rect 583520 406478 584800 406590
-rect 579219 405408 579386 405434
 rect 579219 405296 579245 405408
-rect 579357 405296 584800 405408
+rect 579357 405296 580754 405408
+rect 581714 405296 584800 405408
 rect 579219 405273 579386 405296
 rect 583520 364784 584800 364896
 rect 583520 363602 584800 363714
 rect 583520 362420 584800 362532
+rect 574873 360168 575733 361859
 rect 583520 361238 584800 361350
-rect 583520 360056 584800 360168
-rect 574873 358986 575733 359568
-rect 574873 358874 584800 358986
+rect 574873 360056 584800 360168
+rect 574873 358986 575733 360056
+rect 574873 358874 576610 358986
+rect 583186 358874 584800 358986
 rect 560452 247960 560564 247961
 rect 560447 247850 560453 247960
 rect 560563 247850 560569 247960
@@ -3384,18 +3406,21 @@
 rect 574873 126384 575733 358874
 rect 583520 319562 584800 319674
 rect 583520 318380 584800 318492
-rect 583520 317198 584800 317310
+rect 580631 317198 580637 317310
+rect 580749 317198 584800 317310
 rect 583520 316016 584800 316128
 rect 583520 314834 584800 314946
 rect 580572 313652 580578 313764
-rect 580690 313652 584800 313764
+rect 580690 313652 581306 313764
+rect 583475 313652 584800 313764
 rect 583520 275140 584800 275252
 rect 583520 273958 584800 274070
 rect 583520 272776 584800 272888
 rect 583520 271594 584800 271706
-rect 583520 270412 584800 270524
-rect 581511 269342 581721 269367
-rect 581511 269230 584800 269342
+rect 581481 270412 584800 270524
+rect 581511 269342 581721 270412
+rect 581511 269230 582694 269342
+rect 583273 269230 584800 269342
 rect 581511 242344 581721 269230
 rect 582378 243319 582488 243324
 rect 582377 243318 582489 243319
@@ -3739,8 +3764,7 @@
 rect 182664 23564 182669 23620
 rect 182547 23559 182669 23564
 rect -800 16910 7526 17022
-rect -800 15728 182860 15840
-rect 182972 15728 182978 15840
+rect -800 15728 1743 15840
 rect -800 14546 480 14658
 rect -800 13364 480 13476
 rect -800 12182 480 12294
@@ -3757,22 +3781,7 @@
 rect 66476 6974 66481 6979
 rect 66359 6969 66481 6974
 rect -800 6272 480 6384
-rect 62869 5711 62991 5717
-rect 62869 5594 62874 5599
-rect 62986 5594 62991 5599
-rect 62869 5589 62991 5594
-rect -800 5090 480 5202
-rect 55239 4381 55361 4387
-rect 55239 4264 55244 4269
-rect 55356 4264 55361 4269
-rect 55239 4259 55361 4264
-rect -800 3908 480 4020
-rect 52329 3351 52451 3357
-rect 52329 3234 52334 3239
-rect 52446 3234 52451 3239
-rect 52329 3229 52451 3234
-rect -800 2726 480 2838
-rect 184555 2503 184615 87688
+rect 184555 5761 184615 87688
 rect 184895 79783 184955 87961
 rect 185227 86998 185233 87062
 rect 185297 86998 185303 87062
@@ -3793,7 +3802,7 @@
 rect 184900 78090 185085 78160
 rect 185165 78090 185400 78160
 rect 184900 77890 185400 78090
-rect 185575 4453 185635 87008
+rect 185575 6550 185635 87008
 rect 185907 86978 185913 87042
 rect 185977 86978 185983 87042
 rect 186247 87008 186253 87072
@@ -3816,7 +3825,7 @@
 rect 185920 78470 186085 78540
 rect 186165 78470 186330 78540
 rect 185920 78300 186330 78470
-rect 186595 6360 186655 87038
+rect 186595 7531 186655 87038
 rect 186868 86920 186874 86984
 rect 186938 86920 186944 86984
 rect 186876 84017 186936 86920
@@ -3842,7 +3851,7 @@
 rect 186843 63698 186877 63703
 rect 186941 63698 187010 63768
 rect 186843 63655 187010 63698
-rect 187555 7280 187615 87688
+rect 187555 7982 187615 87688
 rect 187895 79783 187955 87961
 rect 202750 87960 202755 88270
 rect 202750 87955 202760 87960
@@ -3869,7 +3878,7 @@
 rect 187920 78250 188085 78320
 rect 188165 78250 188330 78320
 rect 187920 78090 188330 78250
-rect 188575 8943 188635 87008
+rect 188575 8773 188635 87008
 rect 188907 86978 188913 87042
 rect 188977 86978 188983 87042
 rect 189247 87008 189253 87072
@@ -3892,7 +3901,7 @@
 rect 188920 78420 189085 78490
 rect 189165 78420 189330 78490
 rect 188920 78230 189330 78420
-rect 189595 9750 189655 87038
+rect 189595 9722 189655 87038
 rect 189868 86998 189874 87062
 rect 189938 86998 189944 87062
 rect 189876 79900 189936 86998
@@ -4591,51 +4600,84 @@
 rect 583520 15728 584800 15840
 rect 583520 14546 584800 14658
 rect 583520 13364 584800 13476
-rect 549598 12182 584800 12294
+rect 549598 12182 579996 12294
+rect 582464 12182 584800 12294
+rect 549598 10373 549710 12182
 rect 583520 11000 584800 11112
 rect 551331 10581 551453 10587
 rect 551331 10464 551336 10469
 rect 551448 10464 551453 10469
 rect 551331 10459 551453 10464
+rect 549592 10261 549598 10373
+rect 549710 10261 549716 10373
+rect 240470 9722 240582 9838
 rect 583520 9818 584800 9930
-rect 239283 9806 239405 9811
-rect 238164 9750 239288 9806
-rect 189595 9694 239288 9750
-rect 239400 9694 239405 9806
-rect 189595 9690 239220 9694
-rect 239283 9689 239405 9694
 rect 576153 9741 576275 9747
+rect 189595 9662 240653 9722
+rect 240470 9426 240582 9662
 rect 576153 9624 576158 9629
 rect 576270 9624 576275 9629
 rect 576153 9619 576275 9624
-rect 188572 8938 188638 8943
-rect 188572 8882 188577 8938
-rect 188633 8882 188638 8938
-rect 188572 8877 188638 8882
+rect 240465 9421 240587 9426
+rect 240465 9309 240470 9421
+rect 240582 9309 240587 9421
+rect 240465 9304 240587 9309
+rect 229702 8773 230366 8922
+rect 188575 8753 230662 8773
+rect 188575 8713 229832 8753
+rect 229702 8641 229832 8713
+rect 229944 8713 230662 8753
+rect 229944 8641 230460 8713
+rect 229702 8479 230366 8641
 rect 583520 8636 584800 8748
+rect 219194 7982 219306 8047
+rect 187555 7922 219853 7982
+rect 219194 7694 219306 7922
 rect 558423 7881 558545 7887
 rect 558423 7764 558428 7769
 rect 558540 7764 558545 7769
 rect 558423 7759 558545 7764
+rect 219189 7689 219311 7694
+rect 219189 7577 219194 7689
+rect 219306 7577 219311 7689
+rect 219189 7572 219311 7577
+rect 212102 7531 212214 7535
+rect 186595 7471 213015 7531
+rect 212102 7096 212214 7471
 rect 583520 7454 584800 7566
-rect 218007 7286 218129 7291
-rect 218007 7280 218012 7286
-rect 187555 7220 218012 7280
-rect 218007 7174 218012 7220
-rect 218124 7280 218129 7286
-rect 218124 7220 218530 7280
-rect 218124 7174 218129 7220
-rect 218007 7169 218129 7174
+rect 212097 7091 212219 7096
+rect 212097 6979 212102 7091
+rect 212214 6979 212219 7091
+rect 212097 6974 212219 6979
 rect 572607 6561 572729 6567
+rect 201464 6550 201576 6559
+rect 185575 6490 201579 6550
+rect 62869 5711 62991 5717
+rect 184555 5649 190938 5761
+rect 184555 5623 184615 5649
+rect 62869 5594 62874 5599
+rect 62986 5594 62991 5599
+rect 62869 5589 62991 5594
+rect 190826 5455 190938 5649
+rect 190821 5450 190943 5455
+rect 190821 5338 190826 5450
+rect 190938 5338 190943 5450
+rect 190821 5333 190943 5338
+rect -800 5090 480 5202
+rect 55239 4381 55361 4387
+rect 55239 4264 55244 4269
+rect 55356 4264 55361 4269
+rect 55239 4259 55361 4264
+rect -800 3908 42760 4020
+rect 42872 3908 42878 4020
+rect 52329 3351 52451 3357
+rect 52329 3234 52334 3239
+rect 52446 3234 52451 3239
+rect 52329 3229 52451 3234
+rect 201464 3119 201576 6490
 rect 572607 6444 572612 6449
 rect 572724 6444 572729 6449
 rect 572607 6439 572729 6444
-rect 210957 6360 211023 6363
-rect 186595 6358 211023 6360
-rect 186595 6302 210962 6358
-rect 211018 6302 211023 6358
-rect 186595 6300 211023 6302
-rect 210957 6297 211023 6300
 rect 565515 6311 565637 6317
 rect 565515 6194 565520 6199
 rect 565632 6194 565637 6199
@@ -4654,16 +4696,13 @@
 rect 562086 5964 562091 5969
 rect 561969 5959 562091 5964
 rect 583520 5090 584800 5202
-rect 185572 4448 185638 4453
-rect 185572 4392 185577 4448
-rect 185633 4392 185638 4448
-rect 185572 4387 185638 4392
 rect 583520 3908 584800 4020
+rect 201459 3114 201581 3119
+rect 201459 3002 201464 3114
+rect 201576 3002 201581 3114
+rect 201459 2997 201581 3002
+rect -800 2726 480 2838
 rect 583520 2726 584800 2838
-rect 184552 2498 184618 2503
-rect 184552 2442 184557 2498
-rect 184613 2442 184618 2498
-rect 184552 2437 184618 2442
 rect 49529 2271 49651 2277
 rect 49529 2154 49534 2159
 rect 49646 2154 49651 2159
@@ -4973,6 +5012,7 @@
 rect 573355 134685 573360 134792
 rect 573360 134685 573472 134792
 rect 573472 134685 573477 134792
+rect 580637 317198 580749 317310
 rect 580578 313652 580690 313764
 rect 582378 243314 582488 243318
 rect 582378 243212 582382 243314
@@ -5094,7 +5134,6 @@
 rect 183877 74357 183936 74417
 rect 183936 74357 183941 74417
 rect 183877 74352 183941 74357
-rect 182860 15728 182972 15840
 rect 69759 8646 69881 8651
 rect 69759 8539 69764 8646
 rect 69764 8539 69876 8646
@@ -5103,18 +5142,6 @@
 rect 66359 6979 66364 7086
 rect 66364 6979 66476 7086
 rect 66476 6979 66481 7086
-rect 62869 5706 62991 5711
-rect 62869 5599 62874 5706
-rect 62874 5599 62986 5706
-rect 62986 5599 62991 5706
-rect 55239 4376 55361 4381
-rect 55239 4269 55244 4376
-rect 55244 4269 55356 4376
-rect 55356 4269 55361 4376
-rect 52329 3346 52451 3351
-rect 52329 3239 52334 3346
-rect 52334 3239 52446 3346
-rect 52446 3239 52451 3346
 rect 185233 86998 185297 87062
 rect 185573 87008 185637 87072
 rect 185085 78095 185090 78160
@@ -5597,6 +5624,7 @@
 rect 551331 10469 551336 10576
 rect 551336 10469 551448 10576
 rect 551448 10469 551453 10576
+rect 549598 10261 549710 10373
 rect 576153 9736 576275 9741
 rect 576153 9629 576158 9736
 rect 576158 9629 576270 9736
@@ -5606,6 +5634,19 @@
 rect 558428 7769 558540 7876
 rect 558540 7769 558545 7876
 rect 572607 6556 572729 6561
+rect 62869 5706 62991 5711
+rect 62869 5599 62874 5706
+rect 62874 5599 62986 5706
+rect 62986 5599 62991 5706
+rect 55239 4376 55361 4381
+rect 55239 4269 55244 4376
+rect 55244 4269 55356 4376
+rect 55356 4269 55361 4376
+rect 42760 3908 42872 4020
+rect 52329 3346 52451 3351
+rect 52329 3239 52334 3346
+rect 52334 3239 52446 3346
+rect 52446 3239 52451 3346
 rect 572607 6449 572612 6556
 rect 572612 6449 572724 6556
 rect 572724 6449 572729 6556
@@ -7024,6 +7065,11 @@
 rect 13590 103810 13770 103990
 rect 13770 103810 13840 103990
 rect 13520 103740 13840 103810
+rect 42656 4020 42976 4124
+rect 42656 3908 42760 4020
+rect 42760 3908 42872 4020
+rect 42872 3908 42976 4020
+rect 42656 3804 42976 3908
 rect 149807 165241 234543 171677
 rect 90860 146596 91132 146868
 rect 140134 132009 142546 134421
@@ -7331,6 +7377,11 @@
 rect 225426 68825 225501 68995
 rect 225181 68750 225501 68825
 rect 204560 68566 204880 68694
+rect 580533 317310 580853 317414
+rect 580533 317198 580637 317310
+rect 580637 317198 580749 317310
+rect 580749 317198 580853 317310
+rect 580533 317094 580853 317198
 rect 580474 313764 580794 313868
 rect 580474 313652 580578 313764
 rect 580578 313652 580690 313764
@@ -7615,11 +7666,6 @@
 rect 488974 17787 488979 18087
 rect 488659 17777 488979 17787
 rect 464622 17513 465034 17514
-rect 182756 15840 183076 15944
-rect 182756 15728 182860 15840
-rect 182860 15728 182972 15840
-rect 182972 15728 183076 15840
-rect 182756 15624 183076 15728
 rect 501237 35582 501557 35642
 rect 501237 35382 501292 35582
 rect 501292 35382 501502 35582
@@ -7654,10 +7700,15 @@
 rect 533374 33822 533694 33947
 rect 521536 30706 521808 30978
 rect 551232 10581 551552 10685
+rect 549494 10373 549814 10477
+rect 549494 10261 549598 10373
+rect 549598 10261 549710 10373
+rect 549710 10261 549814 10373
 rect 551232 10469 551331 10581
 rect 551331 10469 551453 10581
 rect 551453 10469 551552 10581
 rect 551232 10365 551552 10469
+rect 549494 10157 549814 10261
 rect 576054 9741 576374 9845
 rect 576054 9629 576153 9741
 rect 576153 9629 576275 9741
@@ -7756,7 +7807,12 @@
 rect 30782 625675 30806 625995
 rect 30438 625651 30806 625675
 rect 30462 180435 30782 625651
-rect 580413 313892 580733 314248
+rect 580413 317438 580733 318938
+rect 580413 317414 580877 317438
+rect 580413 317094 580533 317414
+rect 580853 317094 580877 317414
+rect 580413 317070 580877 317094
+rect 580413 313892 580733 317070
 rect 580413 313868 580818 313892
 rect 580413 313548 580474 313868
 rect 580794 313548 580818 313868
@@ -8717,15 +8773,17 @@
 rect 465034 17615 495155 17777
 rect 465034 17513 491529 17615
 rect 462928 17442 491529 17513
-rect 182732 15944 183100 15968
-rect 182732 15624 182756 15944
-rect 183076 15624 197433 15944
-rect 182732 15600 183100 15624
 rect 551232 10709 551552 44926
 rect 551208 10685 551576 10709
+rect 549470 10477 549838 10501
+rect 483717 10157 549494 10477
+rect 549814 10157 549838 10477
 rect 551208 10365 551232 10685
 rect 551552 10365 551576 10685
 rect 551208 10341 551576 10365
+rect 42632 4124 43000 4148
+rect 483717 4124 484037 10157
+rect 549470 10133 549838 10157
 rect 554778 6289 555098 46705
 rect 558324 45717 558644 114254
 rect 561870 116606 562190 219970
@@ -8813,6 +8871,10 @@
 rect 572484 6321 572852 6345
 rect 568938 6051 569306 6075
 rect 561846 5841 562214 5865
+rect 42632 3804 42656 4124
+rect 42976 3804 486547 4124
+rect 42632 3780 43000 3804
+rect 483717 3516 484037 3804
 << comment >>
 rect -100 704000 584100 704100
 rect -100 0 0 704000
@@ -8875,8 +8937,6 @@
 port 8 nsew signal bidirectional
 flabel metal3 s 583520 313652 584800 313764 0 FreeSans 1120 0 0 0 gpio_analog[1]
 port 9 nsew signal bidirectional
-flabel metal3 s 583520 358874 584800 358986 0 FreeSans 1120 0 0 0 gpio_analog[2]
-port 10 nsew signal bidirectional
 flabel metal3 s 583520 405296 584800 405408 0 FreeSans 1120 0 0 0 gpio_analog[3]
 port 11 nsew signal bidirectional
 flabel metal3 s 583520 449718 584800 449830 0 FreeSans 1120 0 0 0 gpio_analog[4]
@@ -8993,8 +9053,6 @@
 port 55 nsew signal bidirectional
 flabel metal3 s 583520 2726 584800 2838 0 FreeSans 1120 0 0 0 io_in[0]
 port 56 nsew signal input
-flabel metal3 s 583520 408842 584800 408954 0 FreeSans 1120 0 0 0 io_in[10]
-port 57 nsew signal input
 flabel metal3 s 583520 453264 584800 453376 0 FreeSans 1120 0 0 0 io_in[11]
 port 58 nsew signal input
 flabel metal3 s 583520 497686 584800 497798 0 FreeSans 1120 0 0 0 io_in[12]
@@ -10253,6 +10311,10 @@
 port 571 nsew signal bidirectional
 flabel metal2 s 547790 -800 547902 480 0 FreeSans 1120 90 0 0 la_data_in[119]
 port 185 nsew signal input
+flabel metal3 s 583520 358874 584800 358986 0 FreeSans 1120 0 0 0 gpio_analog[2]
+port 10 nsew signal bidirectional
+flabel metal3 s 583520 408842 584800 408954 0 FreeSans 1120 0 0 0 io_in[10]
+port 57 nsew signal input
 << properties >>
 string FIXED_BBOX 0 0 584000 704000
 << end >>
diff --git a/mgmt_core_wrapper b/mgmt_core_wrapper
new file mode 160000
index 0000000..dc4c190
--- /dev/null
+++ b/mgmt_core_wrapper
@@ -0,0 +1 @@
+Subproject commit dc4c190cd1e35301837d1c719fef213d8d00f4e1
diff --git a/venv/bin/Activate.ps1 b/venv/bin/Activate.ps1
new file mode 100644
index 0000000..2fb3852
--- /dev/null
+++ b/venv/bin/Activate.ps1
@@ -0,0 +1,241 @@
+<#
+.Synopsis
+Activate a Python virtual environment for the current PowerShell session.
+
+.Description
+Pushes the python executable for a virtual environment to the front of the
+$Env:PATH environment variable and sets the prompt to signify that you are
+in a Python virtual environment. Makes use of the command line switches as
+well as the `pyvenv.cfg` file values present in the virtual environment.
+
+.Parameter VenvDir
+Path to the directory that contains the virtual environment to activate. The
+default value for this is the parent of the directory that the Activate.ps1
+script is located within.
+
+.Parameter Prompt
+The prompt prefix to display when this virtual environment is activated. By
+default, this prompt is the name of the virtual environment folder (VenvDir)
+surrounded by parentheses and followed by a single space (ie. '(.venv) ').
+
+.Example
+Activate.ps1
+Activates the Python virtual environment that contains the Activate.ps1 script.
+
+.Example
+Activate.ps1 -Verbose
+Activates the Python virtual environment that contains the Activate.ps1 script,
+and shows extra information about the activation as it executes.
+
+.Example
+Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
+Activates the Python virtual environment located in the specified location.
+
+.Example
+Activate.ps1 -Prompt "MyPython"
+Activates the Python virtual environment that contains the Activate.ps1 script,
+and prefixes the current prompt with the specified string (surrounded in
+parentheses) while the virtual environment is active.
+
+.Notes
+On Windows, it may be required to enable this Activate.ps1 script by setting the
+execution policy for the user. You can do this by issuing the following PowerShell
+command:
+
+PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
+
+For more information on Execution Policies:
+https://go.microsoft.com/fwlink/?LinkID=135170
+
+#>
+Param(
+    [Parameter(Mandatory = $false)]
+    [String]
+    $VenvDir,
+    [Parameter(Mandatory = $false)]
+    [String]
+    $Prompt
+)
+
+<# Function declarations --------------------------------------------------- #>
+
+<#
+.Synopsis
+Remove all shell session elements added by the Activate script, including the
+addition of the virtual environment's Python executable from the beginning of
+the PATH variable.
+
+.Parameter NonDestructive
+If present, do not remove this function from the global namespace for the
+session.
+
+#>
+function global:deactivate ([switch]$NonDestructive) {
+    # Revert to original values
+
+    # The prior prompt:
+    if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
+        Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
+        Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
+    }
+
+    # The prior PYTHONHOME:
+    if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
+        Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
+        Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
+    }
+
+    # The prior PATH:
+    if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
+        Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
+        Remove-Item -Path Env:_OLD_VIRTUAL_PATH
+    }
+
+    # Just remove the VIRTUAL_ENV altogether:
+    if (Test-Path -Path Env:VIRTUAL_ENV) {
+        Remove-Item -Path env:VIRTUAL_ENV
+    }
+
+    # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
+    if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
+        Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
+    }
+
+    # Leave deactivate function in the global namespace if requested:
+    if (-not $NonDestructive) {
+        Remove-Item -Path function:deactivate
+    }
+}
+
+<#
+.Description
+Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
+given folder, and returns them in a map.
+
+For each line in the pyvenv.cfg file, if that line can be parsed into exactly
+two strings separated by `=` (with any amount of whitespace surrounding the =)
+then it is considered a `key = value` line. The left hand string is the key,
+the right hand is the value.
+
+If the value starts with a `'` or a `"` then the first and last character is
+stripped from the value before being captured.
+
+.Parameter ConfigDir
+Path to the directory that contains the `pyvenv.cfg` file.
+#>
+function Get-PyVenvConfig(
+    [String]
+    $ConfigDir
+) {
+    Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
+
+    # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
+    $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
+
+    # An empty map will be returned if no config file is found.
+    $pyvenvConfig = @{ }
+
+    if ($pyvenvConfigPath) {
+
+        Write-Verbose "File exists, parse `key = value` lines"
+        $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
+
+        $pyvenvConfigContent | ForEach-Object {
+            $keyval = $PSItem -split "\s*=\s*", 2
+            if ($keyval[0] -and $keyval[1]) {
+                $val = $keyval[1]
+
+                # Remove extraneous quotations around a string value.
+                if ("'""".Contains($val.Substring(0, 1))) {
+                    $val = $val.Substring(1, $val.Length - 2)
+                }
+
+                $pyvenvConfig[$keyval[0]] = $val
+                Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
+            }
+        }
+    }
+    return $pyvenvConfig
+}
+
+
+<# Begin Activate script --------------------------------------------------- #>
+
+# Determine the containing directory of this script
+$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
+$VenvExecDir = Get-Item -Path $VenvExecPath
+
+Write-Verbose "Activation script is located in path: '$VenvExecPath'"
+Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
+Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
+
+# Set values required in priority: CmdLine, ConfigFile, Default
+# First, get the location of the virtual environment, it might not be
+# VenvExecDir if specified on the command line.
+if ($VenvDir) {
+    Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
+}
+else {
+    Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
+    $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
+    Write-Verbose "VenvDir=$VenvDir"
+}
+
+# Next, read the `pyvenv.cfg` file to determine any required value such
+# as `prompt`.
+$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
+
+# Next, set the prompt from the command line, or the config file, or
+# just use the name of the virtual environment folder.
+if ($Prompt) {
+    Write-Verbose "Prompt specified as argument, using '$Prompt'"
+}
+else {
+    Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
+    if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
+        Write-Verbose "  Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
+        $Prompt = $pyvenvCfg['prompt'];
+    }
+    else {
+        Write-Verbose "  Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
+        Write-Verbose "  Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
+        $Prompt = Split-Path -Path $venvDir -Leaf
+    }
+}
+
+Write-Verbose "Prompt = '$Prompt'"
+Write-Verbose "VenvDir='$VenvDir'"
+
+# Deactivate any currently active virtual environment, but leave the
+# deactivate function in place.
+deactivate -nondestructive
+
+# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
+# that there is an activated venv.
+$env:VIRTUAL_ENV = $VenvDir
+
+if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
+
+    Write-Verbose "Setting prompt to '$Prompt'"
+
+    # Set the prompt to include the env name
+    # Make sure _OLD_VIRTUAL_PROMPT is global
+    function global:_OLD_VIRTUAL_PROMPT { "" }
+    Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
+    New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
+
+    function global:prompt {
+        Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
+        _OLD_VIRTUAL_PROMPT
+    }
+}
+
+# Clear PYTHONHOME
+if (Test-Path -Path Env:PYTHONHOME) {
+    Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
+    Remove-Item -Path Env:PYTHONHOME
+}
+
+# Add the venv to the PATH
+Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
+$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"
diff --git a/venv/bin/activate b/venv/bin/activate
new file mode 100644
index 0000000..2fef21e
--- /dev/null
+++ b/venv/bin/activate
@@ -0,0 +1,66 @@
+# This file must be used with "source bin/activate" *from bash*
+# you cannot run it directly
+
+deactivate () {
+    # reset old environment variables
+    if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
+        PATH="${_OLD_VIRTUAL_PATH:-}"
+        export PATH
+        unset _OLD_VIRTUAL_PATH
+    fi
+    if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
+        PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
+        export PYTHONHOME
+        unset _OLD_VIRTUAL_PYTHONHOME
+    fi
+
+    # This should detect bash and zsh, which have a hash command that must
+    # be called to get it to forget past commands.  Without forgetting
+    # past commands the $PATH changes we made may not be respected
+    if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
+        hash -r 2> /dev/null
+    fi
+
+    if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
+        PS1="${_OLD_VIRTUAL_PS1:-}"
+        export PS1
+        unset _OLD_VIRTUAL_PS1
+    fi
+
+    unset VIRTUAL_ENV
+    if [ ! "${1:-}" = "nondestructive" ] ; then
+    # Self destruct!
+        unset -f deactivate
+    fi
+}
+
+# unset irrelevant variables
+deactivate nondestructive
+
+VIRTUAL_ENV="/home/hni/TopmetalSe-DPS/venv"
+export VIRTUAL_ENV
+
+_OLD_VIRTUAL_PATH="$PATH"
+PATH="$VIRTUAL_ENV/bin:$PATH"
+export PATH
+
+# unset PYTHONHOME if set
+# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
+# could use `if (set -u; : $PYTHONHOME) ;` in bash
+if [ -n "${PYTHONHOME:-}" ] ; then
+    _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
+    unset PYTHONHOME
+fi
+
+if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
+    _OLD_VIRTUAL_PS1="${PS1:-}"
+    PS1="(venv) ${PS1:-}"
+    export PS1
+fi
+
+# This should detect bash and zsh, which have a hash command that must
+# be called to get it to forget past commands.  Without forgetting
+# past commands the $PATH changes we made may not be respected
+if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
+    hash -r 2> /dev/null
+fi
diff --git a/venv/bin/activate.csh b/venv/bin/activate.csh
new file mode 100644
index 0000000..c2965b1
--- /dev/null
+++ b/venv/bin/activate.csh
@@ -0,0 +1,25 @@
+# This file must be used with "source bin/activate.csh" *from csh*.
+# You cannot run it directly.
+# Created by Davide Di Blasi <davidedb@gmail.com>.
+# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
+
+alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate'
+
+# Unset irrelevant variables.
+deactivate nondestructive
+
+setenv VIRTUAL_ENV "/home/hni/TopmetalSe-DPS/venv"
+
+set _OLD_VIRTUAL_PATH="$PATH"
+setenv PATH "$VIRTUAL_ENV/bin:$PATH"
+
+
+set _OLD_VIRTUAL_PROMPT="$prompt"
+
+if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
+    set prompt = "(venv) $prompt"
+endif
+
+alias pydoc python -m pydoc
+
+rehash
diff --git a/venv/bin/activate.fish b/venv/bin/activate.fish
new file mode 100644
index 0000000..028294e
--- /dev/null
+++ b/venv/bin/activate.fish
@@ -0,0 +1,64 @@
+# This file must be used with "source <venv>/bin/activate.fish" *from fish*
+# (https://fishshell.com/); you cannot run it directly.
+
+function deactivate  -d "Exit virtual environment and return to normal shell environment"
+    # reset old environment variables
+    if test -n "$_OLD_VIRTUAL_PATH"
+        set -gx PATH $_OLD_VIRTUAL_PATH
+        set -e _OLD_VIRTUAL_PATH
+    end
+    if test -n "$_OLD_VIRTUAL_PYTHONHOME"
+        set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
+        set -e _OLD_VIRTUAL_PYTHONHOME
+    end
+
+    if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
+        functions -e fish_prompt
+        set -e _OLD_FISH_PROMPT_OVERRIDE
+        functions -c _old_fish_prompt fish_prompt
+        functions -e _old_fish_prompt
+    end
+
+    set -e VIRTUAL_ENV
+    if test "$argv[1]" != "nondestructive"
+        # Self-destruct!
+        functions -e deactivate
+    end
+end
+
+# Unset irrelevant variables.
+deactivate nondestructive
+
+set -gx VIRTUAL_ENV "/home/hni/TopmetalSe-DPS/venv"
+
+set -gx _OLD_VIRTUAL_PATH $PATH
+set -gx PATH "$VIRTUAL_ENV/bin" $PATH
+
+# Unset PYTHONHOME if set.
+if set -q PYTHONHOME
+    set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
+    set -e PYTHONHOME
+end
+
+if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
+    # fish uses a function instead of an env var to generate the prompt.
+
+    # Save the current fish_prompt function as the function _old_fish_prompt.
+    functions -c fish_prompt _old_fish_prompt
+
+    # With the original prompt function renamed, we can override with our own.
+    function fish_prompt
+        # Save the return status of the last command.
+        set -l old_status $status
+
+        # Output the venv prompt; color taken from the blue of the Python logo.
+        printf "%s%s%s" (set_color 4B8BBE) "(venv) " (set_color normal)
+
+        # Restore the return status of the previous command.
+        echo "exit $old_status" | .
+        # Output the original/"old" prompt.
+        _old_fish_prompt
+    end
+
+    set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
+end
diff --git a/venv/bin/easy_install b/venv/bin/easy_install
new file mode 100755
index 0000000..b9cf288
--- /dev/null
+++ b/venv/bin/easy_install
@@ -0,0 +1,8 @@
+#!/home/hni/TopmetalSe-DPS/venv/bin/python3
+# -*- coding: utf-8 -*-
+import re
+import sys
+from setuptools.command.easy_install import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
diff --git a/venv/bin/easy_install-3.9 b/venv/bin/easy_install-3.9
new file mode 100755
index 0000000..b9cf288
--- /dev/null
+++ b/venv/bin/easy_install-3.9
@@ -0,0 +1,8 @@
+#!/home/hni/TopmetalSe-DPS/venv/bin/python3
+# -*- coding: utf-8 -*-
+import re
+import sys
+from setuptools.command.easy_install import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
diff --git a/venv/bin/pip b/venv/bin/pip
new file mode 100755
index 0000000..8c00378
--- /dev/null
+++ b/venv/bin/pip
@@ -0,0 +1,8 @@
+#!/home/hni/TopmetalSe-DPS/venv/bin/python3
+# -*- coding: utf-8 -*-
+import re
+import sys
+from pip._internal.cli.main import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
diff --git a/venv/bin/pip3 b/venv/bin/pip3
new file mode 100755
index 0000000..8c00378
--- /dev/null
+++ b/venv/bin/pip3
@@ -0,0 +1,8 @@
+#!/home/hni/TopmetalSe-DPS/venv/bin/python3
+# -*- coding: utf-8 -*-
+import re
+import sys
+from pip._internal.cli.main import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
diff --git a/venv/bin/pip3.10 b/venv/bin/pip3.10
new file mode 100755
index 0000000..8c00378
--- /dev/null
+++ b/venv/bin/pip3.10
@@ -0,0 +1,8 @@
+#!/home/hni/TopmetalSe-DPS/venv/bin/python3
+# -*- coding: utf-8 -*-
+import re
+import sys
+from pip._internal.cli.main import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
diff --git a/venv/bin/pip3.9 b/venv/bin/pip3.9
new file mode 100755
index 0000000..8c00378
--- /dev/null
+++ b/venv/bin/pip3.9
@@ -0,0 +1,8 @@
+#!/home/hni/TopmetalSe-DPS/venv/bin/python3
+# -*- coding: utf-8 -*-
+import re
+import sys
+from pip._internal.cli.main import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
diff --git a/venv/bin/python b/venv/bin/python
new file mode 120000
index 0000000..b8a0adb
--- /dev/null
+++ b/venv/bin/python
@@ -0,0 +1 @@
+python3
\ No newline at end of file
diff --git a/venv/bin/python3 b/venv/bin/python3
new file mode 120000
index 0000000..b48b94c
--- /dev/null
+++ b/venv/bin/python3
@@ -0,0 +1 @@
+/opt/OpenICEDA/OICvenv/bin/python3
\ No newline at end of file
diff --git a/venv/bin/python3.9 b/venv/bin/python3.9
new file mode 120000
index 0000000..b8a0adb
--- /dev/null
+++ b/venv/bin/python3.9
@@ -0,0 +1 @@
+python3
\ No newline at end of file
diff --git a/venv/lib/python3.9/site-packages/Jinja2-3.1.2.dist-info/INSTALLER b/venv/lib/python3.9/site-packages/Jinja2-3.1.2.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/Jinja2-3.1.2.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/lib/python3.9/site-packages/Jinja2-3.1.2.dist-info/LICENSE.rst b/venv/lib/python3.9/site-packages/Jinja2-3.1.2.dist-info/LICENSE.rst
new file mode 100644
index 0000000..c37cae4
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/Jinja2-3.1.2.dist-info/LICENSE.rst
@@ -0,0 +1,28 @@
+Copyright 2007 Pallets
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+1.  Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+
+2.  Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in the
+    documentation and/or other materials provided with the distribution.
+
+3.  Neither the name of the copyright holder nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/venv/lib/python3.9/site-packages/Jinja2-3.1.2.dist-info/METADATA b/venv/lib/python3.9/site-packages/Jinja2-3.1.2.dist-info/METADATA
new file mode 100644
index 0000000..f54bb5c
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/Jinja2-3.1.2.dist-info/METADATA
@@ -0,0 +1,113 @@
+Metadata-Version: 2.1
+Name: Jinja2
+Version: 3.1.2
+Summary: A very fast and expressive template engine.
+Home-page: https://palletsprojects.com/p/jinja/
+Author: Armin Ronacher
+Author-email: armin.ronacher@active-4.com
+Maintainer: Pallets
+Maintainer-email: contact@palletsprojects.com
+License: BSD-3-Clause
+Project-URL: Donate, https://palletsprojects.com/donate
+Project-URL: Documentation, https://jinja.palletsprojects.com/
+Project-URL: Changes, https://jinja.palletsprojects.com/changes/
+Project-URL: Source Code, https://github.com/pallets/jinja/
+Project-URL: Issue Tracker, https://github.com/pallets/jinja/issues/
+Project-URL: Twitter, https://twitter.com/PalletsTeam
+Project-URL: Chat, https://discord.gg/pallets
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
+Classifier: Topic :: Text Processing :: Markup :: HTML
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+License-File: LICENSE.rst
+Requires-Dist: MarkupSafe (>=2.0)
+Provides-Extra: i18n
+Requires-Dist: Babel (>=2.7) ; extra == 'i18n'
+
+Jinja
+=====
+
+Jinja is a fast, expressive, extensible templating engine. Special
+placeholders in the template allow writing code similar to Python
+syntax. Then the template is passed data to render the final document.
+
+It includes:
+
+-   Template inheritance and inclusion.
+-   Define and import macros within templates.
+-   HTML templates can use autoescaping to prevent XSS from untrusted
+    user input.
+-   A sandboxed environment can safely render untrusted templates.
+-   AsyncIO support for generating templates and calling async
+    functions.
+-   I18N support with Babel.
+-   Templates are compiled to optimized Python code just-in-time and
+    cached, or can be compiled ahead-of-time.
+-   Exceptions point to the correct line in templates to make debugging
+    easier.
+-   Extensible filters, tests, functions, and even syntax.
+
+Jinja's philosophy is that while application logic belongs in Python if
+possible, it shouldn't make the template designer's job difficult by
+restricting functionality too much.
+
+
+Installing
+----------
+
+Install and update using `pip`_:
+
+.. code-block:: text
+
+    $ pip install -U Jinja2
+
+.. _pip: https://pip.pypa.io/en/stable/getting-started/
+
+
+In A Nutshell
+-------------
+
+.. code-block:: jinja
+
+    {% extends "base.html" %}
+    {% block title %}Members{% endblock %}
+    {% block content %}
+      <ul>
+      {% for user in users %}
+        <li><a href="{{ user.url }}">{{ user.username }}</a></li>
+      {% endfor %}
+      </ul>
+    {% endblock %}
+
+
+Donate
+------
+
+The Pallets organization develops and supports Jinja and other popular
+packages. In order to grow the community of contributors and users, and
+allow the maintainers to devote more time to the projects, `please
+donate today`_.
+
+.. _please donate today: https://palletsprojects.com/donate
+
+
+Links
+-----
+
+-   Documentation: https://jinja.palletsprojects.com/
+-   Changes: https://jinja.palletsprojects.com/changes/
+-   PyPI Releases: https://pypi.org/project/Jinja2/
+-   Source Code: https://github.com/pallets/jinja/
+-   Issue Tracker: https://github.com/pallets/jinja/issues/
+-   Website: https://palletsprojects.com/p/jinja/
+-   Twitter: https://twitter.com/PalletsTeam
+-   Chat: https://discord.gg/pallets
+
+
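As a complement to the template snippet in the metadata above, here is a minimal, self-contained sketch of rendering an inline template from Python. Environment and from_string are standard Jinja2 API; the template and values are illustrative:

    from jinja2 import Environment

    # With autoescaping on, untrusted values are HTML-escaped on render.
    env = Environment(autoescape=True)
    template = env.from_string("Hello {{ name }}!")
    print(template.render(name="<World>"))  # -> Hello &lt;World&gt;!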
diff --git a/venv/lib/python3.9/site-packages/Jinja2-3.1.2.dist-info/RECORD b/venv/lib/python3.9/site-packages/Jinja2-3.1.2.dist-info/RECORD
new file mode 100644
index 0000000..24b49f1
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/Jinja2-3.1.2.dist-info/RECORD
@@ -0,0 +1,58 @@
+Jinja2-3.1.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+Jinja2-3.1.2.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475
+Jinja2-3.1.2.dist-info/METADATA,sha256=PZ6v2SIidMNixR7MRUX9f7ZWsPwtXanknqiZUmRbh4U,3539
+Jinja2-3.1.2.dist-info/RECORD,,
+Jinja2-3.1.2.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
+Jinja2-3.1.2.dist-info/entry_points.txt,sha256=zRd62fbqIyfUpsRtU7EVIFyiu1tPwfgO7EvPErnxgTE,59
+Jinja2-3.1.2.dist-info/top_level.txt,sha256=PkeVWtLb3-CqjWi1fO29OCbj55EhX_chhKrCdrVe_zs,7
+jinja2/__init__.py,sha256=8vGduD8ytwgD6GDSqpYc2m3aU-T7PKOAddvVXgGr_Fs,1927
+jinja2/__pycache__/__init__.cpython-39.pyc,,
+jinja2/__pycache__/_identifier.cpython-39.pyc,,
+jinja2/__pycache__/async_utils.cpython-39.pyc,,
+jinja2/__pycache__/bccache.cpython-39.pyc,,
+jinja2/__pycache__/compiler.cpython-39.pyc,,
+jinja2/__pycache__/constants.cpython-39.pyc,,
+jinja2/__pycache__/debug.cpython-39.pyc,,
+jinja2/__pycache__/defaults.cpython-39.pyc,,
+jinja2/__pycache__/environment.cpython-39.pyc,,
+jinja2/__pycache__/exceptions.cpython-39.pyc,,
+jinja2/__pycache__/ext.cpython-39.pyc,,
+jinja2/__pycache__/filters.cpython-39.pyc,,
+jinja2/__pycache__/idtracking.cpython-39.pyc,,
+jinja2/__pycache__/lexer.cpython-39.pyc,,
+jinja2/__pycache__/loaders.cpython-39.pyc,,
+jinja2/__pycache__/meta.cpython-39.pyc,,
+jinja2/__pycache__/nativetypes.cpython-39.pyc,,
+jinja2/__pycache__/nodes.cpython-39.pyc,,
+jinja2/__pycache__/optimizer.cpython-39.pyc,,
+jinja2/__pycache__/parser.cpython-39.pyc,,
+jinja2/__pycache__/runtime.cpython-39.pyc,,
+jinja2/__pycache__/sandbox.cpython-39.pyc,,
+jinja2/__pycache__/tests.cpython-39.pyc,,
+jinja2/__pycache__/utils.cpython-39.pyc,,
+jinja2/__pycache__/visitor.cpython-39.pyc,,
+jinja2/_identifier.py,sha256=_zYctNKzRqlk_murTNlzrju1FFJL7Va_Ijqqd7ii2lU,1958
+jinja2/async_utils.py,sha256=dHlbTeaxFPtAOQEYOGYh_PHcDT0rsDaUJAFDl_0XtTg,2472
+jinja2/bccache.py,sha256=mhz5xtLxCcHRAa56azOhphIAe19u1we0ojifNMClDio,14061
+jinja2/compiler.py,sha256=Gs-N8ThJ7OWK4-reKoO8Wh1ZXz95MVphBKNVf75qBr8,72172
+jinja2/constants.py,sha256=GMoFydBF_kdpaRKPoM5cl5MviquVRLVyZtfp5-16jg0,1433
+jinja2/debug.py,sha256=iWJ432RadxJNnaMOPrjIDInz50UEgni3_HKuFXi2vuQ,6299
+jinja2/defaults.py,sha256=boBcSw78h-lp20YbaXSJsqkAI2uN_mD_TtCydpeq5wU,1267
+jinja2/environment.py,sha256=6uHIcc7ZblqOMdx_uYNKqRnnwAF0_nzbyeMP9FFtuh4,61349
+jinja2/exceptions.py,sha256=ioHeHrWwCWNaXX1inHmHVblvc4haO7AXsjCp3GfWvx0,5071
+jinja2/ext.py,sha256=ivr3P7LKbddiXDVez20EflcO3q2aHQwz9P_PgWGHVqE,31502
+jinja2/filters.py,sha256=9js1V-h2RlyW90IhLiBGLM2U-k6SCy2F4BUUMgB3K9Q,53509
+jinja2/idtracking.py,sha256=GfNmadir4oDALVxzn3DL9YInhJDr69ebXeA2ygfuCGA,10704
+jinja2/lexer.py,sha256=DW2nX9zk-6MWp65YR2bqqj0xqCvLtD-u9NWT8AnFRxQ,29726
+jinja2/loaders.py,sha256=BfptfvTVpClUd-leMkHczdyPNYFzp_n7PKOJ98iyHOg,23207
+jinja2/meta.py,sha256=GNPEvifmSaU3CMxlbheBOZjeZ277HThOPUTf1RkppKQ,4396
+jinja2/nativetypes.py,sha256=DXgORDPRmVWgy034H0xL8eF7qYoK3DrMxs-935d0Fzk,4226
+jinja2/nodes.py,sha256=i34GPRAZexXMT6bwuf5SEyvdmS-bRCy9KMjwN5O6pjk,34550
+jinja2/optimizer.py,sha256=tHkMwXxfZkbfA1KmLcqmBMSaz7RLIvvItrJcPoXTyD8,1650
+jinja2/parser.py,sha256=nHd-DFHbiygvfaPtm9rcQXJChZG7DPsWfiEsqfwKerY,39595
+jinja2/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+jinja2/runtime.py,sha256=5CmD5BjbEJxSiDNTFBeKCaq8qU4aYD2v6q2EluyExms,33476
+jinja2/sandbox.py,sha256=Y0xZeXQnH6EX5VjaV2YixESxoepnRbW_3UeQosaBU3M,14584
+jinja2/tests.py,sha256=Am5Z6Lmfr2XaH_npIfJJ8MdXtWsbLjMULZJulTAj30E,5905
+jinja2/utils.py,sha256=u9jXESxGn8ATZNVolwmkjUVu4SA-tLgV0W7PcSfPfdQ,23965
+jinja2/visitor.py,sha256=MH14C6yq24G_KVtWzjwaI7Wg14PCJIYlWW1kpkxYak0,3568
diff --git a/venv/lib/python3.9/site-packages/Jinja2-3.1.2.dist-info/WHEEL b/venv/lib/python3.9/site-packages/Jinja2-3.1.2.dist-info/WHEEL
new file mode 100644
index 0000000..becc9a6
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/Jinja2-3.1.2.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.1)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/venv/lib/python3.9/site-packages/Jinja2-3.1.2.dist-info/entry_points.txt b/venv/lib/python3.9/site-packages/Jinja2-3.1.2.dist-info/entry_points.txt
new file mode 100644
index 0000000..7b9666c
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/Jinja2-3.1.2.dist-info/entry_points.txt
@@ -0,0 +1,2 @@
+[babel.extractors]
+jinja2 = jinja2.ext:babel_extract[i18n]
diff --git a/venv/lib/python3.9/site-packages/Jinja2-3.1.2.dist-info/top_level.txt b/venv/lib/python3.9/site-packages/Jinja2-3.1.2.dist-info/top_level.txt
new file mode 100644
index 0000000..7f7afbf
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/Jinja2-3.1.2.dist-info/top_level.txt
@@ -0,0 +1 @@
+jinja2
diff --git a/venv/lib/python3.9/site-packages/MarkupSafe-2.1.1.dist-info/INSTALLER b/venv/lib/python3.9/site-packages/MarkupSafe-2.1.1.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/MarkupSafe-2.1.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/lib/python3.9/site-packages/MarkupSafe-2.1.1.dist-info/LICENSE.rst b/venv/lib/python3.9/site-packages/MarkupSafe-2.1.1.dist-info/LICENSE.rst
new file mode 100644
index 0000000..9d227a0
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/MarkupSafe-2.1.1.dist-info/LICENSE.rst
@@ -0,0 +1,28 @@
+Copyright 2010 Pallets
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+1.  Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+
+2.  Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in the
+    documentation and/or other materials provided with the distribution.
+
+3.  Neither the name of the copyright holder nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/venv/lib/python3.9/site-packages/MarkupSafe-2.1.1.dist-info/METADATA b/venv/lib/python3.9/site-packages/MarkupSafe-2.1.1.dist-info/METADATA
new file mode 100644
index 0000000..485a5e0
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/MarkupSafe-2.1.1.dist-info/METADATA
@@ -0,0 +1,101 @@
+Metadata-Version: 2.1
+Name: MarkupSafe
+Version: 2.1.1
+Summary: Safely add untrusted strings to HTML/XML markup.
+Home-page: https://palletsprojects.com/p/markupsafe/
+Author: Armin Ronacher
+Author-email: armin.ronacher@active-4.com
+Maintainer: Pallets
+Maintainer-email: contact@palletsprojects.com
+License: BSD-3-Clause
+Project-URL: Donate, https://palletsprojects.com/donate
+Project-URL: Documentation, https://markupsafe.palletsprojects.com/
+Project-URL: Changes, https://markupsafe.palletsprojects.com/changes/
+Project-URL: Source Code, https://github.com/pallets/markupsafe/
+Project-URL: Issue Tracker, https://github.com/pallets/markupsafe/issues/
+Project-URL: Twitter, https://twitter.com/PalletsTeam
+Project-URL: Chat, https://discord.gg/pallets
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
+Classifier: Topic :: Text Processing :: Markup :: HTML
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+License-File: LICENSE.rst
+
+MarkupSafe
+==========
+
+MarkupSafe implements a text object that escapes characters so it is
+safe to use in HTML and XML. Characters that have special meanings are
+replaced so that they display as the actual characters. This mitigates
+injection attacks, meaning untrusted user input can safely be displayed
+on a page.
+
+
+Installing
+----------
+
+Install and update using `pip`_:
+
+.. code-block:: text
+
+    pip install -U MarkupSafe
+
+.. _pip: https://pip.pypa.io/en/stable/getting-started/
+
+
+Examples
+--------
+
+.. code-block:: pycon
+
+    >>> from markupsafe import Markup, escape
+
+    >>> # escape replaces special characters and wraps in Markup
+    >>> escape("<script>alert(document.cookie);</script>")
+    Markup('&lt;script&gt;alert(document.cookie);&lt;/script&gt;')
+
+    >>> # wrap in Markup to mark text "safe" and prevent escaping
+    >>> Markup("<strong>Hello</strong>")
+    Markup('<strong>Hello</strong>')
+
+    >>> escape(Markup("<strong>Hello</strong>"))
+    Markup('<strong>Hello</strong>')
+
+    >>> # Markup is a str subclass
+    >>> # methods and operators escape their arguments
+    >>> template = Markup("Hello <em>{name}</em>")
+    >>> template.format(name='"World"')
+    Markup('Hello <em>&#34;World&#34;</em>')
+
+
+Donate
+------
+
+The Pallets organization develops and supports MarkupSafe and other
+popular packages. In order to grow the community of contributors and
+users, and allow the maintainers to devote more time to the projects,
+`please donate today`_.
+
+.. _please donate today: https://palletsprojects.com/donate
+
+
+Links
+-----
+
+-   Documentation: https://markupsafe.palletsprojects.com/
+-   Changes: https://markupsafe.palletsprojects.com/changes/
+-   PyPI Releases: https://pypi.org/project/MarkupSafe/
+-   Source Code: https://github.com/pallets/markupsafe/
+-   Issue Tracker: https://github.com/pallets/markupsafe/issues/
+-   Website: https://palletsprojects.com/p/markupsafe/
+-   Twitter: https://twitter.com/PalletsTeam
+-   Chat: https://discord.gg/pallets
+
+
diff --git a/venv/lib/python3.9/site-packages/MarkupSafe-2.1.1.dist-info/RECORD b/venv/lib/python3.9/site-packages/MarkupSafe-2.1.1.dist-info/RECORD
new file mode 100644
index 0000000..4fdcfc2
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/MarkupSafe-2.1.1.dist-info/RECORD
@@ -0,0 +1,14 @@
+MarkupSafe-2.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+MarkupSafe-2.1.1.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475
+MarkupSafe-2.1.1.dist-info/METADATA,sha256=DC93VszmzjLQcrVChRUjtW4XbUwjTdbaplpgdlbFdbs,3242
+MarkupSafe-2.1.1.dist-info/RECORD,,
+MarkupSafe-2.1.1.dist-info/WHEEL,sha256=ts1NGDem03kTrzsJp50lKy9cpDxDoGil0Q-wLa_TR_0,148
+MarkupSafe-2.1.1.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11
+markupsafe/__init__.py,sha256=xfaUQkKNRTdYWe6HnnJ2HjguFmS-C_0H6g8-Q9VAfkQ,9284
+markupsafe/__pycache__/__init__.cpython-39.pyc,,
+markupsafe/__pycache__/_native.cpython-39.pyc,,
+markupsafe/_native.py,sha256=GR86Qvo_GcgKmKreA1WmYN9ud17OFwkww8E-fiW-57s,1713
+markupsafe/_speedups.c,sha256=X2XvQVtIdcK4Usz70BvkzoOfjTCmQlDkkjYSn-swE0g,7083
+markupsafe/_speedups.cpython-39-x86_64-linux-gnu.so,sha256=DCWkK-C94Ojd4_RrL6I_91Jl0DsomXQ8XdhfOpXhKxo,44008
+markupsafe/_speedups.pyi,sha256=vfMCsOgbAXRNLUXkyuyonG8uEWKYU4PDqNuMaDELAYw,229
+markupsafe/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/venv/lib/python3.9/site-packages/MarkupSafe-2.1.1.dist-info/WHEEL b/venv/lib/python3.9/site-packages/MarkupSafe-2.1.1.dist-info/WHEEL
new file mode 100644
index 0000000..ade3730
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/MarkupSafe-2.1.1.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.0)
+Root-Is-Purelib: false
+Tag: cp39-cp39-manylinux_2_17_x86_64
+Tag: cp39-cp39-manylinux2014_x86_64
+
diff --git a/venv/lib/python3.9/site-packages/MarkupSafe-2.1.1.dist-info/top_level.txt b/venv/lib/python3.9/site-packages/MarkupSafe-2.1.1.dist-info/top_level.txt
new file mode 100644
index 0000000..75bf729
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/MarkupSafe-2.1.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+markupsafe
diff --git a/venv/lib/python3.9/site-packages/__pycache__/easy_install.cpython-39.pyc b/venv/lib/python3.9/site-packages/__pycache__/easy_install.cpython-39.pyc
new file mode 100644
index 0000000..e344f99
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/__pycache__/easy_install.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/click-8.1.3.dist-info/INSTALLER b/venv/lib/python3.9/site-packages/click-8.1.3.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click-8.1.3.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/lib/python3.9/site-packages/click-8.1.3.dist-info/LICENSE.rst b/venv/lib/python3.9/site-packages/click-8.1.3.dist-info/LICENSE.rst
new file mode 100644
index 0000000..d12a849
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click-8.1.3.dist-info/LICENSE.rst
@@ -0,0 +1,28 @@
+Copyright 2014 Pallets
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+1.  Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+
+2.  Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in the
+    documentation and/or other materials provided with the distribution.
+
+3.  Neither the name of the copyright holder nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/venv/lib/python3.9/site-packages/click-8.1.3.dist-info/METADATA b/venv/lib/python3.9/site-packages/click-8.1.3.dist-info/METADATA
new file mode 100644
index 0000000..8e5dc1e
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click-8.1.3.dist-info/METADATA
@@ -0,0 +1,111 @@
+Metadata-Version: 2.1
+Name: click
+Version: 8.1.3
+Summary: Composable command line interface toolkit
+Home-page: https://palletsprojects.com/p/click/
+Author: Armin Ronacher
+Author-email: armin.ronacher@active-4.com
+Maintainer: Pallets
+Maintainer-email: contact@palletsprojects.com
+License: BSD-3-Clause
+Project-URL: Donate, https://palletsprojects.com/donate
+Project-URL: Documentation, https://click.palletsprojects.com/
+Project-URL: Changes, https://click.palletsprojects.com/changes/
+Project-URL: Source Code, https://github.com/pallets/click/
+Project-URL: Issue Tracker, https://github.com/pallets/click/issues/
+Project-URL: Twitter, https://twitter.com/PalletsTeam
+Project-URL: Chat, https://discord.gg/pallets
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+License-File: LICENSE.rst
+Requires-Dist: colorama ; platform_system == "Windows"
+Requires-Dist: importlib-metadata ; python_version < "3.8"
+
+\$ click\_
+==========
+
+Click is a Python package for creating beautiful command line interfaces
+in a composable way with as little code as necessary. It's the "Command
+Line Interface Creation Kit". It's highly configurable but comes with
+sensible defaults out of the box.
+
+It aims to make the process of writing command line tools quick and fun
+while also preventing any frustration caused by the inability to
+implement an intended CLI API.
+
+Click in three points:
+
+-   Arbitrary nesting of commands
+-   Automatic help page generation
+-   Supports lazy loading of subcommands at runtime
+
+
+Installing
+----------
+
+Install and update using `pip`_:
+
+.. code-block:: text
+
+    $ pip install -U click
+
+.. _pip: https://pip.pypa.io/en/stable/getting-started/
+
+
+A Simple Example
+----------------
+
+.. code-block:: python
+
+    import click
+
+    @click.command()
+    @click.option("--count", default=1, help="Number of greetings.")
+    @click.option("--name", prompt="Your name", help="The person to greet.")
+    def hello(count, name):
+        """Simple program that greets NAME for a total of COUNT times."""
+        for _ in range(count):
+            click.echo(f"Hello, {name}!")
+
+    if __name__ == '__main__':
+        hello()
+
+.. code-block:: text
+
+    $ python hello.py --count=3
+    Your name: Click
+    Hello, Click!
+    Hello, Click!
+    Hello, Click!
+
+
+Donate
+------
+
+The Pallets organization develops and supports Click and other popular
+packages. In order to grow the community of contributors and users, and
+allow the maintainers to devote more time to the projects, `please
+donate today`_.
+
+.. _please donate today: https://palletsprojects.com/donate
+
+
+Links
+-----
+
+-   Documentation: https://click.palletsprojects.com/
+-   Changes: https://click.palletsprojects.com/changes/
+-   PyPI Releases: https://pypi.org/project/click/
+-   Source Code: https://github.com/pallets/click
+-   Issue Tracker: https://github.com/pallets/click/issues
+-   Website: https://palletsprojects.com/p/click
+-   Twitter: https://twitter.com/PalletsTeam
+-   Chat: https://discord.gg/pallets
+
+
diff --git a/venv/lib/python3.9/site-packages/click-8.1.3.dist-info/RECORD b/venv/lib/python3.9/site-packages/click-8.1.3.dist-info/RECORD
new file mode 100644
index 0000000..3e2aae0
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click-8.1.3.dist-info/RECORD
@@ -0,0 +1,40 @@
+click-8.1.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+click-8.1.3.dist-info/LICENSE.rst,sha256=morRBqOU6FO_4h9C9OctWSgZoigF2ZG18ydQKSkrZY0,1475
+click-8.1.3.dist-info/METADATA,sha256=tFJIX5lOjx7c5LjZbdTPFVDJSgyv9F74XY0XCPp_gnc,3247
+click-8.1.3.dist-info/RECORD,,
+click-8.1.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+click-8.1.3.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
+click-8.1.3.dist-info/top_level.txt,sha256=J1ZQogalYS4pphY_lPECoNMfw0HzTSrZglC4Yfwo4xA,6
+click/__init__.py,sha256=rQBLutqg-z6m8nOzivIfigDn_emijB_dKv9BZ2FNi5s,3138
+click/__pycache__/__init__.cpython-39.pyc,,
+click/__pycache__/_compat.cpython-39.pyc,,
+click/__pycache__/_termui_impl.cpython-39.pyc,,
+click/__pycache__/_textwrap.cpython-39.pyc,,
+click/__pycache__/_winconsole.cpython-39.pyc,,
+click/__pycache__/core.cpython-39.pyc,,
+click/__pycache__/decorators.cpython-39.pyc,,
+click/__pycache__/exceptions.cpython-39.pyc,,
+click/__pycache__/formatting.cpython-39.pyc,,
+click/__pycache__/globals.cpython-39.pyc,,
+click/__pycache__/parser.cpython-39.pyc,,
+click/__pycache__/shell_completion.cpython-39.pyc,,
+click/__pycache__/termui.cpython-39.pyc,,
+click/__pycache__/testing.cpython-39.pyc,,
+click/__pycache__/types.cpython-39.pyc,,
+click/__pycache__/utils.cpython-39.pyc,,
+click/_compat.py,sha256=JIHLYs7Jzz4KT9t-ds4o4jBzLjnwCiJQKqur-5iwCKI,18810
+click/_termui_impl.py,sha256=qK6Cfy4mRFxvxE8dya8RBhLpSC8HjF-lvBc6aNrPdwg,23451
+click/_textwrap.py,sha256=10fQ64OcBUMuK7mFvh8363_uoOxPlRItZBmKzRJDgoY,1353
+click/_winconsole.py,sha256=5ju3jQkcZD0W27WEMGqmEP4y_crUVzPCqsX_FYb7BO0,7860
+click/core.py,sha256=mz87bYEKzIoNYEa56BFAiOJnvt1Y0L-i7wD4_ZecieE,112782
+click/decorators.py,sha256=yo3zvzgUm5q7h5CXjyV6q3h_PJAiUaem178zXwdWUFI,16350
+click/exceptions.py,sha256=7gDaLGuFZBeCNwY9ERMsF2-Z3R9Fvq09Zc6IZSKjseo,9167
+click/formatting.py,sha256=Frf0-5W33-loyY_i9qrwXR8-STnW3m5gvyxLVUdyxyk,9706
+click/globals.py,sha256=TP-qM88STzc7f127h35TD_v920FgfOD2EwzqA0oE8XU,1961
+click/parser.py,sha256=cAEt1uQR8gq3-S9ysqbVU-fdAZNvilxw4ReJ_T1OQMk,19044
+click/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+click/shell_completion.py,sha256=qOp_BeC9esEOSZKyu5G7RIxEUaLsXUX-mTb7hB1r4QY,18018
+click/termui.py,sha256=ACBQVOvFCTSqtD5VREeCAdRtlHd-Imla-Lte4wSfMjA,28355
+click/testing.py,sha256=ptpMYgRY7dVfE3UDgkgwayu9ePw98sQI3D7zZXiCpj4,16063
+click/types.py,sha256=rEb1aZSQKq3ciCMmjpG2Uva9vk498XRL7ThrcK2GRss,35805
+click/utils.py,sha256=33D6E7poH_nrKB-xr-UyDEXnxOcCiQqxuRLtrqeVv6o,18682
diff --git a/venv/lib/python3.9/site-packages/click-8.1.3.dist-info/REQUESTED b/venv/lib/python3.9/site-packages/click-8.1.3.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click-8.1.3.dist-info/REQUESTED
diff --git a/venv/lib/python3.9/site-packages/click-8.1.3.dist-info/WHEEL b/venv/lib/python3.9/site-packages/click-8.1.3.dist-info/WHEEL
new file mode 100644
index 0000000..becc9a6
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click-8.1.3.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.1)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/venv/lib/python3.9/site-packages/click-8.1.3.dist-info/top_level.txt b/venv/lib/python3.9/site-packages/click-8.1.3.dist-info/top_level.txt
new file mode 100644
index 0000000..dca9a90
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click-8.1.3.dist-info/top_level.txt
@@ -0,0 +1 @@
+click
diff --git a/venv/lib/python3.9/site-packages/click/__init__.py b/venv/lib/python3.9/site-packages/click/__init__.py
new file mode 100644
index 0000000..e3ef423
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click/__init__.py
@@ -0,0 +1,73 @@
+"""
+Click is a simple Python module inspired by the stdlib optparse to make
+writing command line scripts fun. Unlike other modules, it's based
+around a simple API that does not come with too much magic and is
+composable.
+"""
+from .core import Argument as Argument
+from .core import BaseCommand as BaseCommand
+from .core import Command as Command
+from .core import CommandCollection as CommandCollection
+from .core import Context as Context
+from .core import Group as Group
+from .core import MultiCommand as MultiCommand
+from .core import Option as Option
+from .core import Parameter as Parameter
+from .decorators import argument as argument
+from .decorators import command as command
+from .decorators import confirmation_option as confirmation_option
+from .decorators import group as group
+from .decorators import help_option as help_option
+from .decorators import make_pass_decorator as make_pass_decorator
+from .decorators import option as option
+from .decorators import pass_context as pass_context
+from .decorators import pass_obj as pass_obj
+from .decorators import password_option as password_option
+from .decorators import version_option as version_option
+from .exceptions import Abort as Abort
+from .exceptions import BadArgumentUsage as BadArgumentUsage
+from .exceptions import BadOptionUsage as BadOptionUsage
+from .exceptions import BadParameter as BadParameter
+from .exceptions import ClickException as ClickException
+from .exceptions import FileError as FileError
+from .exceptions import MissingParameter as MissingParameter
+from .exceptions import NoSuchOption as NoSuchOption
+from .exceptions import UsageError as UsageError
+from .formatting import HelpFormatter as HelpFormatter
+from .formatting import wrap_text as wrap_text
+from .globals import get_current_context as get_current_context
+from .parser import OptionParser as OptionParser
+from .termui import clear as clear
+from .termui import confirm as confirm
+from .termui import echo_via_pager as echo_via_pager
+from .termui import edit as edit
+from .termui import getchar as getchar
+from .termui import launch as launch
+from .termui import pause as pause
+from .termui import progressbar as progressbar
+from .termui import prompt as prompt
+from .termui import secho as secho
+from .termui import style as style
+from .termui import unstyle as unstyle
+from .types import BOOL as BOOL
+from .types import Choice as Choice
+from .types import DateTime as DateTime
+from .types import File as File
+from .types import FLOAT as FLOAT
+from .types import FloatRange as FloatRange
+from .types import INT as INT
+from .types import IntRange as IntRange
+from .types import ParamType as ParamType
+from .types import Path as Path
+from .types import STRING as STRING
+from .types import Tuple as Tuple
+from .types import UNPROCESSED as UNPROCESSED
+from .types import UUID as UUID
+from .utils import echo as echo
+from .utils import format_filename as format_filename
+from .utils import get_app_dir as get_app_dir
+from .utils import get_binary_stream as get_binary_stream
+from .utils import get_text_stream as get_text_stream
+from .utils import open_file as open_file
+
+__version__ = "8.1.3"
diff --git a/venv/lib/python3.9/site-packages/click/__pycache__/__init__.cpython-39.pyc b/venv/lib/python3.9/site-packages/click/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..fa3533b
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click/__pycache__/__init__.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/click/__pycache__/_compat.cpython-39.pyc b/venv/lib/python3.9/site-packages/click/__pycache__/_compat.cpython-39.pyc
new file mode 100644
index 0000000..e847ec5
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click/__pycache__/_compat.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/click/__pycache__/_termui_impl.cpython-39.pyc b/venv/lib/python3.9/site-packages/click/__pycache__/_termui_impl.cpython-39.pyc
new file mode 100644
index 0000000..d8b49e9
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click/__pycache__/_termui_impl.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/click/__pycache__/_textwrap.cpython-39.pyc b/venv/lib/python3.9/site-packages/click/__pycache__/_textwrap.cpython-39.pyc
new file mode 100644
index 0000000..8a819a8
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click/__pycache__/_textwrap.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/click/__pycache__/_winconsole.cpython-39.pyc b/venv/lib/python3.9/site-packages/click/__pycache__/_winconsole.cpython-39.pyc
new file mode 100644
index 0000000..78bb6a2
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click/__pycache__/_winconsole.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/click/__pycache__/core.cpython-39.pyc b/venv/lib/python3.9/site-packages/click/__pycache__/core.cpython-39.pyc
new file mode 100644
index 0000000..4a7b783
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click/__pycache__/core.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/click/__pycache__/decorators.cpython-39.pyc b/venv/lib/python3.9/site-packages/click/__pycache__/decorators.cpython-39.pyc
new file mode 100644
index 0000000..ce0d959
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click/__pycache__/decorators.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/click/__pycache__/exceptions.cpython-39.pyc b/venv/lib/python3.9/site-packages/click/__pycache__/exceptions.cpython-39.pyc
new file mode 100644
index 0000000..6155613
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click/__pycache__/exceptions.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/click/__pycache__/formatting.cpython-39.pyc b/venv/lib/python3.9/site-packages/click/__pycache__/formatting.cpython-39.pyc
new file mode 100644
index 0000000..e1f179b
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click/__pycache__/formatting.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/click/__pycache__/globals.cpython-39.pyc b/venv/lib/python3.9/site-packages/click/__pycache__/globals.cpython-39.pyc
new file mode 100644
index 0000000..cca4a42
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click/__pycache__/globals.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/click/__pycache__/parser.cpython-39.pyc b/venv/lib/python3.9/site-packages/click/__pycache__/parser.cpython-39.pyc
new file mode 100644
index 0000000..599b207
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click/__pycache__/parser.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/click/__pycache__/shell_completion.cpython-39.pyc b/venv/lib/python3.9/site-packages/click/__pycache__/shell_completion.cpython-39.pyc
new file mode 100644
index 0000000..5f755cf
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click/__pycache__/shell_completion.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/click/__pycache__/termui.cpython-39.pyc b/venv/lib/python3.9/site-packages/click/__pycache__/termui.cpython-39.pyc
new file mode 100644
index 0000000..245c5b3
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click/__pycache__/termui.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/click/__pycache__/testing.cpython-39.pyc b/venv/lib/python3.9/site-packages/click/__pycache__/testing.cpython-39.pyc
new file mode 100644
index 0000000..da2cf7c
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click/__pycache__/testing.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/click/__pycache__/types.cpython-39.pyc b/venv/lib/python3.9/site-packages/click/__pycache__/types.cpython-39.pyc
new file mode 100644
index 0000000..7d7c946
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click/__pycache__/types.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/click/__pycache__/utils.cpython-39.pyc b/venv/lib/python3.9/site-packages/click/__pycache__/utils.cpython-39.pyc
new file mode 100644
index 0000000..2c0a6da
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click/__pycache__/utils.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/click/_compat.py b/venv/lib/python3.9/site-packages/click/_compat.py
new file mode 100644
index 0000000..766d286
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click/_compat.py
@@ -0,0 +1,626 @@
+import codecs
+import io
+import os
+import re
+import sys
+import typing as t
+from weakref import WeakKeyDictionary
+
+CYGWIN = sys.platform.startswith("cygwin")
+MSYS2 = sys.platform.startswith("win") and ("GCC" in sys.version)
+# Determine local App Engine environment, per Google's own suggestion
+APP_ENGINE = "APPENGINE_RUNTIME" in os.environ and "Development/" in os.environ.get(
+    "SERVER_SOFTWARE", ""
+)
+WIN = sys.platform.startswith("win") and not APP_ENGINE and not MSYS2
+auto_wrap_for_ansi: t.Optional[t.Callable[[t.TextIO], t.TextIO]] = None
+_ansi_re = re.compile(r"\033\[[;?0-9]*[a-zA-Z]")
+
+
+def get_filesystem_encoding() -> str:
+    return sys.getfilesystemencoding() or sys.getdefaultencoding()
+
+
+def _make_text_stream(
+    stream: t.BinaryIO,
+    encoding: t.Optional[str],
+    errors: t.Optional[str],
+    force_readable: bool = False,
+    force_writable: bool = False,
+) -> t.TextIO:
+    if encoding is None:
+        encoding = get_best_encoding(stream)
+    if errors is None:
+        errors = "replace"
+    return _NonClosingTextIOWrapper(
+        stream,
+        encoding,
+        errors,
+        line_buffering=True,
+        force_readable=force_readable,
+        force_writable=force_writable,
+    )
+
+
+def is_ascii_encoding(encoding: str) -> bool:
+    """Checks if a given encoding is ascii."""
+    try:
+        return codecs.lookup(encoding).name == "ascii"
+    except LookupError:
+        return False
+
+
+def get_best_encoding(stream: t.IO) -> str:
+    """Returns the default stream encoding if not found."""
+    rv = getattr(stream, "encoding", None) or sys.getdefaultencoding()
+    if is_ascii_encoding(rv):
+        return "utf-8"
+    return rv
+
+
+class _NonClosingTextIOWrapper(io.TextIOWrapper):
+    def __init__(
+        self,
+        stream: t.BinaryIO,
+        encoding: t.Optional[str],
+        errors: t.Optional[str],
+        force_readable: bool = False,
+        force_writable: bool = False,
+        **extra: t.Any,
+    ) -> None:
+        self._stream = stream = t.cast(
+            t.BinaryIO, _FixupStream(stream, force_readable, force_writable)
+        )
+        super().__init__(stream, encoding, errors, **extra)
+
+    def __del__(self) -> None:
+        try:
+            self.detach()
+        except Exception:
+            pass
+
+    def isatty(self) -> bool:
+        # https://bitbucket.org/pypy/pypy/issue/1803
+        return self._stream.isatty()
+
+
+class _FixupStream:
+    """The new io interface needs more from streams than streams
+    traditionally implement.  As such, this fix-up code is necessary in
+    some circumstances.
+
+    The forcing of the readable and writable flags is there because some
+    tools put badly patched objects on sys (one such offender being certain
+    versions of jupyter notebook).
+    """
+
+    def __init__(
+        self,
+        stream: t.BinaryIO,
+        force_readable: bool = False,
+        force_writable: bool = False,
+    ):
+        self._stream = stream
+        self._force_readable = force_readable
+        self._force_writable = force_writable
+
+    def __getattr__(self, name: str) -> t.Any:
+        return getattr(self._stream, name)
+
+    def read1(self, size: int) -> bytes:
+        f = getattr(self._stream, "read1", None)
+
+        if f is not None:
+            return t.cast(bytes, f(size))
+
+        return self._stream.read(size)
+
+    def readable(self) -> bool:
+        if self._force_readable:
+            return True
+        x = getattr(self._stream, "readable", None)
+        if x is not None:
+            return t.cast(bool, x())
+        try:
+            self._stream.read(0)
+        except Exception:
+            return False
+        return True
+
+    def writable(self) -> bool:
+        if self._force_writable:
+            return True
+        x = getattr(self._stream, "writable", None)
+        if x is not None:
+            return t.cast(bool, x())
+        try:
+            self._stream.write("")  # type: ignore
+        except Exception:
+            try:
+                self._stream.write(b"")
+            except Exception:
+                return False
+        return True
+
+    def seekable(self) -> bool:
+        x = getattr(self._stream, "seekable", None)
+        if x is not None:
+            return t.cast(bool, x())
+        try:
+            self._stream.seek(self._stream.tell())
+        except Exception:
+            return False
+        return True
+
+
+def _is_binary_reader(stream: t.IO, default: bool = False) -> bool:
+    try:
+        return isinstance(stream.read(0), bytes)
+    except Exception:
+        return default
+        # This happens in some cases where the stream was already
+        # closed.  In this case, we assume the default.
+
+
+def _is_binary_writer(stream: t.IO, default: bool = False) -> bool:
+    try:
+        stream.write(b"")
+    except Exception:
+        try:
+            stream.write("")
+            return False
+        except Exception:
+            pass
+        return default
+    return True
+
+
+def _find_binary_reader(stream: t.IO) -> t.Optional[t.BinaryIO]:
+    # We need to figure out if the given stream is already binary.
+    # This can happen because the official docs recommend detaching
+    # the streams to get binary streams.  Some code might do this, so
+    # we need to deal with this case explicitly.
+    if _is_binary_reader(stream, False):
+        return t.cast(t.BinaryIO, stream)
+
+    buf = getattr(stream, "buffer", None)
+
+    # Same situation here; this time we assume that the buffer is
+    # actually binary in case it's closed.
+    if buf is not None and _is_binary_reader(buf, True):
+        return t.cast(t.BinaryIO, buf)
+
+    return None
+
+
+def _find_binary_writer(stream: t.IO) -> t.Optional[t.BinaryIO]:
+    # We need to figure out if the given stream is already binary.
+    # This can happen because the official docs recommend detaching
+    # the streams to get binary streams.  Some code might do this, so
+    # we need to deal with this case explicitly.
+    if _is_binary_writer(stream, False):
+        return t.cast(t.BinaryIO, stream)
+
+    buf = getattr(stream, "buffer", None)
+
+    # Same situation here; this time we assume that the buffer is
+    # actually binary in case it's closed.
+    if buf is not None and _is_binary_writer(buf, True):
+        return t.cast(t.BinaryIO, buf)
+
+    return None
+
+
+def _stream_is_misconfigured(stream: t.TextIO) -> bool:
+    """A stream is misconfigured if its encoding is ASCII."""
+    # If the stream does not have an encoding set, we assume it's set
+    # to ASCII.  This appears to happen in certain unittest
+    # environments.  It's not quite clear what the correct behavior is
+    # but this at least will force Click to recover somehow.
+    return is_ascii_encoding(getattr(stream, "encoding", None) or "ascii")
+
+
+def _is_compat_stream_attr(stream: t.TextIO, attr: str, value: t.Optional[str]) -> bool:
+    """A stream attribute is compatible if it is equal to the
+    desired value or the desired value is unset and the attribute
+    has a value.
+    """
+    stream_value = getattr(stream, attr, None)
+    return stream_value == value or (value is None and stream_value is not None)
+
+
+def _is_compatible_text_stream(
+    stream: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str]
+) -> bool:
+    """Check if a stream's encoding and errors attributes are
+    compatible with the desired values.
+    """
+    return _is_compat_stream_attr(
+        stream, "encoding", encoding
+    ) and _is_compat_stream_attr(stream, "errors", errors)
+
+
+def _force_correct_text_stream(
+    text_stream: t.IO,
+    encoding: t.Optional[str],
+    errors: t.Optional[str],
+    is_binary: t.Callable[[t.IO, bool], bool],
+    find_binary: t.Callable[[t.IO], t.Optional[t.BinaryIO]],
+    force_readable: bool = False,
+    force_writable: bool = False,
+) -> t.TextIO:
+    if is_binary(text_stream, False):
+        binary_reader = t.cast(t.BinaryIO, text_stream)
+    else:
+        text_stream = t.cast(t.TextIO, text_stream)
+        # If the stream looks compatible, and won't default to a
+        # misconfigured ascii encoding, return it as-is.
+        if _is_compatible_text_stream(text_stream, encoding, errors) and not (
+            encoding is None and _stream_is_misconfigured(text_stream)
+        ):
+            return text_stream
+
+        # Otherwise, get the underlying binary reader.
+        possible_binary_reader = find_binary(text_stream)
+
+        # If that's not possible, silently use the original reader
+        # and get mojibake instead of exceptions.
+        if possible_binary_reader is None:
+            return text_stream
+
+        binary_reader = possible_binary_reader
+
+    # Default errors to replace instead of strict in order to get
+    # something that works.
+    if errors is None:
+        errors = "replace"
+
+    # Wrap the binary stream in a text stream with the correct
+    # encoding parameters.
+    return _make_text_stream(
+        binary_reader,
+        encoding,
+        errors,
+        force_readable=force_readable,
+        force_writable=force_writable,
+    )
+
+
+def _force_correct_text_reader(
+    text_reader: t.IO,
+    encoding: t.Optional[str],
+    errors: t.Optional[str],
+    force_readable: bool = False,
+) -> t.TextIO:
+    return _force_correct_text_stream(
+        text_reader,
+        encoding,
+        errors,
+        _is_binary_reader,
+        _find_binary_reader,
+        force_readable=force_readable,
+    )
+
+
+def _force_correct_text_writer(
+    text_writer: t.IO,
+    encoding: t.Optional[str],
+    errors: t.Optional[str],
+    force_writable: bool = False,
+) -> t.TextIO:
+    return _force_correct_text_stream(
+        text_writer,
+        encoding,
+        errors,
+        _is_binary_writer,
+        _find_binary_writer,
+        force_writable=force_writable,
+    )
+
+
+def get_binary_stdin() -> t.BinaryIO:
+    reader = _find_binary_reader(sys.stdin)
+    if reader is None:
+        raise RuntimeError("Was not able to determine binary stream for sys.stdin.")
+    return reader
+
+
+def get_binary_stdout() -> t.BinaryIO:
+    writer = _find_binary_writer(sys.stdout)
+    if writer is None:
+        raise RuntimeError("Was not able to determine binary stream for sys.stdout.")
+    return writer
+
+
+def get_binary_stderr() -> t.BinaryIO:
+    writer = _find_binary_writer(sys.stderr)
+    if writer is None:
+        raise RuntimeError("Was not able to determine binary stream for sys.stderr.")
+    return writer
+
+
+def get_text_stdin(
+    encoding: t.Optional[str] = None, errors: t.Optional[str] = None
+) -> t.TextIO:
+    rv = _get_windows_console_stream(sys.stdin, encoding, errors)
+    if rv is not None:
+        return rv
+    return _force_correct_text_reader(sys.stdin, encoding, errors, force_readable=True)
+
+
+def get_text_stdout(
+    encoding: t.Optional[str] = None, errors: t.Optional[str] = None
+) -> t.TextIO:
+    rv = _get_windows_console_stream(sys.stdout, encoding, errors)
+    if rv is not None:
+        return rv
+    return _force_correct_text_writer(sys.stdout, encoding, errors, force_writable=True)
+
+
+def get_text_stderr(
+    encoding: t.Optional[str] = None, errors: t.Optional[str] = None
+) -> t.TextIO:
+    rv = _get_windows_console_stream(sys.stderr, encoding, errors)
+    if rv is not None:
+        return rv
+    return _force_correct_text_writer(sys.stderr, encoding, errors, force_writable=True)
+
+
+def _wrap_io_open(
+    file: t.Union[str, os.PathLike, int],
+    mode: str,
+    encoding: t.Optional[str],
+    errors: t.Optional[str],
+) -> t.IO:
+    """Handles not passing ``encoding`` and ``errors`` in binary mode."""
+    if "b" in mode:
+        return open(file, mode)
+
+    return open(file, mode, encoding=encoding, errors=errors)
+
+
+def open_stream(
+    filename: str,
+    mode: str = "r",
+    encoding: t.Optional[str] = None,
+    errors: t.Optional[str] = "strict",
+    atomic: bool = False,
+) -> t.Tuple[t.IO, bool]:
+    binary = "b" in mode
+
+    # Standard streams first. These are simple because they ignore the
+    # atomic flag. Use fsdecode to handle Path("-").
+    if os.fsdecode(filename) == "-":
+        if any(m in mode for m in ["w", "a", "x"]):
+            if binary:
+                return get_binary_stdout(), False
+            return get_text_stdout(encoding=encoding, errors=errors), False
+        if binary:
+            return get_binary_stdin(), False
+        return get_text_stdin(encoding=encoding, errors=errors), False
+
+    # Non-atomic writes directly go out through the regular open functions.
+    if not atomic:
+        return _wrap_io_open(filename, mode, encoding, errors), True
+
+    # Reject open modes that do not make sense for atomic writes.
+    if "a" in mode:
+        raise ValueError(
+            "Appending to an existing file is not supported, because that"
+            " would involve an expensive `copy`-operation to a temporary"
+            " file. Open the file in normal `w`-mode and copy explicitly"
+            " if that's what you're after."
+        )
+    if "x" in mode:
+        raise ValueError("Use the `overwrite`-parameter instead.")
+    if "w" not in mode:
+        raise ValueError("Atomic writes only make sense with `w`-mode.")
+
+    # Atomic writes are more complicated.  They work by opening a file
+    # as a proxy in the same folder and then using the fdopen
+    # functionality to wrap it in a Python file.  Then we wrap it in an
+    # atomic file that moves the file over on close.
+    import errno
+    import random
+
+    try:
+        perm: t.Optional[int] = os.stat(filename).st_mode
+    except OSError:
+        perm = None
+
+    flags = os.O_RDWR | os.O_CREAT | os.O_EXCL
+
+    if binary:
+        flags |= getattr(os, "O_BINARY", 0)
+
+    while True:
+        tmp_filename = os.path.join(
+            os.path.dirname(filename),
+            f".__atomic-write{random.randrange(1 << 32):08x}",
+        )
+        try:
+            fd = os.open(tmp_filename, flags, 0o666 if perm is None else perm)
+            break
+        except OSError as e:
+            if e.errno == errno.EEXIST or (
+                os.name == "nt"
+                and e.errno == errno.EACCES
+                and os.path.isdir(e.filename)
+                and os.access(e.filename, os.W_OK)
+            ):
+                continue
+            raise
+
+    if perm is not None:
+        os.chmod(tmp_filename, perm)  # in case perm includes bits in umask
+
+    f = _wrap_io_open(fd, mode, encoding, errors)
+    af = _AtomicFile(f, tmp_filename, os.path.realpath(filename))
+    return t.cast(t.IO, af), True
+
+
+class _AtomicFile:
+    def __init__(self, f: t.IO, tmp_filename: str, real_filename: str) -> None:
+        self._f = f
+        self._tmp_filename = tmp_filename
+        self._real_filename = real_filename
+        self.closed = False
+
+    @property
+    def name(self) -> str:
+        return self._real_filename
+
+    def close(self, delete: bool = False) -> None:
+        if self.closed:
+            return
+        self._f.close()
+        os.replace(self._tmp_filename, self._real_filename)
+        self.closed = True
+
+    def __getattr__(self, name: str) -> t.Any:
+        return getattr(self._f, name)
+
+    def __enter__(self) -> "_AtomicFile":
+        return self
+
+    def __exit__(self, exc_type, exc_value, tb):  # type: ignore
+        self.close(delete=exc_type is not None)
+
+    def __repr__(self) -> str:
+        return repr(self._f)
+
+
+def strip_ansi(value: str) -> str:
+    return _ansi_re.sub("", value)
+
+
+def _is_jupyter_kernel_output(stream: t.IO) -> bool:
+    while isinstance(stream, (_FixupStream, _NonClosingTextIOWrapper)):
+        stream = stream._stream
+
+    return stream.__class__.__module__.startswith("ipykernel.")
+
+
+def should_strip_ansi(
+    stream: t.Optional[t.IO] = None, color: t.Optional[bool] = None
+) -> bool:
+    if color is None:
+        if stream is None:
+            stream = sys.stdin
+        return not isatty(stream) and not _is_jupyter_kernel_output(stream)
+    return not color
+
+
+# On Windows, wrap the output streams with colorama to support ANSI
+# color codes.
+# NOTE: the sys.platform check duplicates WIN so mypy skips this branch on Linux
+if sys.platform.startswith("win") and WIN:
+    from ._winconsole import _get_windows_console_stream
+
+    def _get_argv_encoding() -> str:
+        import locale
+
+        return locale.getpreferredencoding()
+
+    _ansi_stream_wrappers: t.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary()
+
+    def auto_wrap_for_ansi(
+        stream: t.TextIO, color: t.Optional[bool] = None
+    ) -> t.TextIO:
+        """Support ANSI color and style codes on Windows by wrapping a
+        stream with colorama.
+        """
+        try:
+            cached = _ansi_stream_wrappers.get(stream)
+        except Exception:
+            cached = None
+
+        if cached is not None:
+            return cached
+
+        import colorama
+
+        strip = should_strip_ansi(stream, color)
+        ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip)
+        rv = t.cast(t.TextIO, ansi_wrapper.stream)
+        _write = rv.write
+
+        def _safe_write(s):
+            try:
+                return _write(s)
+            except BaseException:
+                ansi_wrapper.reset_all()
+                raise
+
+        rv.write = _safe_write
+
+        try:
+            _ansi_stream_wrappers[stream] = rv
+        except Exception:
+            pass
+
+        return rv
+
+else:
+
+    def _get_argv_encoding() -> str:
+        return getattr(sys.stdin, "encoding", None) or get_filesystem_encoding()
+
+    def _get_windows_console_stream(
+        f: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str]
+    ) -> t.Optional[t.TextIO]:
+        return None
+
+
+def term_len(x: str) -> int:
+    return len(strip_ansi(x))
+
+
+def isatty(stream: t.IO) -> bool:
+    try:
+        return stream.isatty()
+    except Exception:
+        return False
+
+
+def _make_cached_stream_func(
+    src_func: t.Callable[[], t.TextIO], wrapper_func: t.Callable[[], t.TextIO]
+) -> t.Callable[[], t.TextIO]:
+    cache: t.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary()
+
+    def func() -> t.TextIO:
+        stream = src_func()
+        try:
+            rv = cache.get(stream)
+        except Exception:
+            rv = None
+        if rv is not None:
+            return rv
+        rv = wrapper_func()
+        try:
+            cache[stream] = rv
+        except Exception:
+            pass
+        return rv
+
+    return func
+
+
+_default_text_stdin = _make_cached_stream_func(lambda: sys.stdin, get_text_stdin)
+_default_text_stdout = _make_cached_stream_func(lambda: sys.stdout, get_text_stdout)
+_default_text_stderr = _make_cached_stream_func(lambda: sys.stderr, get_text_stderr)
+
+
+binary_streams: t.Mapping[str, t.Callable[[], t.BinaryIO]] = {
+    "stdin": get_binary_stdin,
+    "stdout": get_binary_stdout,
+    "stderr": get_binary_stderr,
+}
+
+text_streams: t.Mapping[
+    str, t.Callable[[t.Optional[str], t.Optional[str]], t.TextIO]
+] = {
+    "stdin": get_text_stdin,
+    "stdout": get_text_stdout,
+    "stderr": get_text_stderr,
+}
diff --git a/venv/lib/python3.9/site-packages/click/_termui_impl.py b/venv/lib/python3.9/site-packages/click/_termui_impl.py
new file mode 100644
index 0000000..4b979bc
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click/_termui_impl.py
@@ -0,0 +1,717 @@
+"""
+This module contains implementations for the termui module. To keep the
+import time of Click down, some infrequently used functionality is
+placed in this module and only imported as needed.
+"""
+import contextlib
+import math
+import os
+import sys
+import time
+import typing as t
+from gettext import gettext as _
+
+from ._compat import _default_text_stdout
+from ._compat import CYGWIN
+from ._compat import get_best_encoding
+from ._compat import isatty
+from ._compat import open_stream
+from ._compat import strip_ansi
+from ._compat import term_len
+from ._compat import WIN
+from .exceptions import ClickException
+from .utils import echo
+
+V = t.TypeVar("V")
+
+if os.name == "nt":
+    BEFORE_BAR = "\r"
+    AFTER_BAR = "\n"
+else:
+    BEFORE_BAR = "\r\033[?25l"
+    AFTER_BAR = "\033[?25h\n"
+
+
+class ProgressBar(t.Generic[V]):
+    def __init__(
+        self,
+        iterable: t.Optional[t.Iterable[V]],
+        length: t.Optional[int] = None,
+        fill_char: str = "#",
+        empty_char: str = " ",
+        bar_template: str = "%(bar)s",
+        info_sep: str = "  ",
+        show_eta: bool = True,
+        show_percent: t.Optional[bool] = None,
+        show_pos: bool = False,
+        item_show_func: t.Optional[t.Callable[[t.Optional[V]], t.Optional[str]]] = None,
+        label: t.Optional[str] = None,
+        file: t.Optional[t.TextIO] = None,
+        color: t.Optional[bool] = None,
+        update_min_steps: int = 1,
+        width: int = 30,
+    ) -> None:
+        self.fill_char = fill_char
+        self.empty_char = empty_char
+        self.bar_template = bar_template
+        self.info_sep = info_sep
+        self.show_eta = show_eta
+        self.show_percent = show_percent
+        self.show_pos = show_pos
+        self.item_show_func = item_show_func
+        self.label = label or ""
+        if file is None:
+            file = _default_text_stdout()
+        self.file = file
+        self.color = color
+        self.update_min_steps = update_min_steps
+        self._completed_intervals = 0
+        self.width = width
+        self.autowidth = width == 0
+
+        if length is None:
+            from operator import length_hint
+
+            length = length_hint(iterable, -1)
+
+            if length == -1:
+                length = None
+        if iterable is None:
+            if length is None:
+                raise TypeError("iterable or length is required")
+            iterable = t.cast(t.Iterable[V], range(length))
+        self.iter = iter(iterable)
+        self.length = length
+        self.pos = 0
+        self.avg: t.List[float] = []
+        self.start = self.last_eta = time.time()
+        self.eta_known = False
+        self.finished = False
+        self.max_width: t.Optional[int] = None
+        self.entered = False
+        self.current_item: t.Optional[V] = None
+        self.is_hidden = not isatty(self.file)
+        self._last_line: t.Optional[str] = None
+
+    def __enter__(self) -> "ProgressBar":
+        self.entered = True
+        self.render_progress()
+        return self
+
+    def __exit__(self, exc_type, exc_value, tb):  # type: ignore
+        self.render_finish()
+
+    def __iter__(self) -> t.Iterator[V]:
+        if not self.entered:
+            raise RuntimeError("You need to use progress bars in a with block.")
+        self.render_progress()
+        return self.generator()
+
+    def __next__(self) -> V:
+        # Iteration is defined in terms of a generator function,
+        # returned by iter(self); use that to define next(). This works
+        # because `self.iter` is an iterable consumed by that generator,
+        # so it is re-entry safe. Calling `next(self.generator())`
+        # twice works and does "what you want".
+        return next(iter(self))
+
+    def render_finish(self) -> None:
+        if self.is_hidden:
+            return
+        self.file.write(AFTER_BAR)
+        self.file.flush()
+
+    @property
+    def pct(self) -> float:
+        if self.finished:
+            return 1.0
+        return min(self.pos / (float(self.length or 1) or 1), 1.0)
+
+    @property
+    def time_per_iteration(self) -> float:
+        if not self.avg:
+            return 0.0
+        return sum(self.avg) / float(len(self.avg))
+
+    @property
+    def eta(self) -> float:
+        if self.length is not None and not self.finished:
+            return self.time_per_iteration * (self.length - self.pos)
+        return 0.0
+
+    def format_eta(self) -> str:
+        if self.eta_known:
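+            # Decompose the eta (in seconds) into days, hours, minutes
+            # and seconds; e.g. an eta of 90061 seconds renders as
+            # "1d 01:01:01" (illustrative).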
+            t = int(self.eta)
+            seconds = t % 60
+            t //= 60
+            minutes = t % 60
+            t //= 60
+            hours = t % 24
+            t //= 24
+            if t > 0:
+                return f"{t}d {hours:02}:{minutes:02}:{seconds:02}"
+            else:
+                return f"{hours:02}:{minutes:02}:{seconds:02}"
+        return ""
+
+    def format_pos(self) -> str:
+        pos = str(self.pos)
+        if self.length is not None:
+            pos += f"/{self.length}"
+        return pos
+
+    def format_pct(self) -> str:
+        return f"{int(self.pct * 100): 4}%"[1:]
+
+    def format_bar(self) -> str:
+        if self.length is not None:
+            bar_length = int(self.pct * self.width)
+            bar = self.fill_char * bar_length
+            bar += self.empty_char * (self.width - bar_length)
+        elif self.finished:
+            bar = self.fill_char * self.width
+        else:
+            chars = list(self.empty_char * (self.width or 1))
+            if self.time_per_iteration != 0:
+                chars[
+                    int(
+                        (math.cos(self.pos * self.time_per_iteration) / 2.0 + 0.5)
+                        * self.width
+                    )
+                ] = self.fill_char
+            bar = "".join(chars)
+        return bar
+
+    def format_progress_line(self) -> str:
+        show_percent = self.show_percent
+
+        info_bits = []
+        if self.length is not None and show_percent is None:
+            show_percent = not self.show_pos
+
+        if self.show_pos:
+            info_bits.append(self.format_pos())
+        if show_percent:
+            info_bits.append(self.format_pct())
+        if self.show_eta and self.eta_known and not self.finished:
+            info_bits.append(self.format_eta())
+        if self.item_show_func is not None:
+            item_info = self.item_show_func(self.current_item)
+            if item_info is not None:
+                info_bits.append(item_info)
+
+        return (
+            self.bar_template
+            % {
+                "label": self.label,
+                "bar": self.format_bar(),
+                "info": self.info_sep.join(info_bits),
+            }
+        ).rstrip()
+
+    def render_progress(self) -> None:
+        import shutil
+
+        if self.is_hidden:
+            # Only output the label as it changes if the output is not a
+            # TTY. Use file=stderr if you expect to be piping stdout.
+            if self._last_line != self.label:
+                self._last_line = self.label
+                echo(self.label, file=self.file, color=self.color)
+
+            return
+
+        buf = []
+        # Update width in case the terminal has been resized
+        if self.autowidth:
+            old_width = self.width
+            self.width = 0
+            clutter_length = term_len(self.format_progress_line())
+            new_width = max(0, shutil.get_terminal_size().columns - clutter_length)
+            if new_width < old_width:
+                buf.append(BEFORE_BAR)
+                buf.append(" " * self.max_width)  # type: ignore
+                self.max_width = new_width
+            self.width = new_width
+
+        clear_width = self.width
+        if self.max_width is not None:
+            clear_width = self.max_width
+
+        buf.append(BEFORE_BAR)
+        line = self.format_progress_line()
+        line_len = term_len(line)
+        if self.max_width is None or self.max_width < line_len:
+            self.max_width = line_len
+
+        buf.append(line)
+        buf.append(" " * (clear_width - line_len))
+        line = "".join(buf)
+        # Render the line only if it changed.
+
+        if line != self._last_line:
+            self._last_line = line
+            echo(line, file=self.file, color=self.color, nl=False)
+            self.file.flush()
+
+    def make_step(self, n_steps: int) -> None:
+        self.pos += n_steps
+        if self.length is not None and self.pos >= self.length:
+            self.finished = True
+
+        if (time.time() - self.last_eta) < 1.0:
+            return
+
+        self.last_eta = time.time()
+
+        # self.avg is a rolling window (length <= 7) of the average time
+        # per step, computed as the total elapsed time divided by the
+        # number of completed steps.
+        if self.pos:
+            step = (time.time() - self.start) / self.pos
+        else:
+            step = time.time() - self.start
+
+        self.avg = self.avg[-6:] + [step]
+
+        self.eta_known = self.length is not None
+
+    def update(self, n_steps: int, current_item: t.Optional[V] = None) -> None:
+        """Update the progress bar by advancing a specified number of
+        steps, and optionally set the ``current_item`` for this new
+        position.
+
+        :param n_steps: Number of steps to advance.
+        :param current_item: Optional item to set as ``current_item``
+            for the updated position.
+
+        .. versionchanged:: 8.0
+            Added the ``current_item`` optional parameter.
+
+        .. versionchanged:: 8.0
+            Only render when the number of steps meets the
+            ``update_min_steps`` threshold.
+        """
+        if current_item is not None:
+            self.current_item = current_item
+
+        self._completed_intervals += n_steps
+
+        if self._completed_intervals >= self.update_min_steps:
+            self.make_step(self._completed_intervals)
+            self.render_progress()
+            self._completed_intervals = 0
+
+    def finish(self) -> None:
+        self.eta_known = False
+        self.current_item = None
+        self.finished = True
+
+    def generator(self) -> t.Iterator[V]:
+        """Return a generator which yields the items added to the bar
+        during construction, and updates the progress bar *after* the
+        yielded block returns.
+        """
+        # WARNING: the iterator interface for `ProgressBar` relies on
+        # this and only works because this is a simple generator which
+        # doesn't create or manage additional state. If this function
+        # changes, the impact should be evaluated both against
+        # `iter(bar)` and `next(bar)`. `next()` in particular may call
+        # `self.generator()` repeatedly, and this must remain safe in
+        # order for that interface to work.
+        if not self.entered:
+            raise RuntimeError("You need to use progress bars in a with block.")
+
+        if self.is_hidden:
+            yield from self.iter
+        else:
+            for rv in self.iter:
+                self.current_item = rv
+
+                # This allows show_item_func to be updated before the
+                # item is processed. Only trigger at the beginning of
+                # the update interval.
+                if self._completed_intervals == 0:
+                    self.render_progress()
+
+                yield rv
+                self.update(1)
+
+            self.finish()
+            self.render_progress()
+
+
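+# Illustrative sketch, not part of click: ProgressBar is normally created
+# through the public ``click.progressbar`` helper rather than instantiated
+# directly. ``_example_progressbar`` is a hypothetical name.
+def _example_progressbar() -> None:
+    import click
+
+    # The bar must be used as a context manager; iterating it advances
+    # the display by one step per yielded item.
+    with click.progressbar(range(100), label="Processing") as bar:
+        for _item in bar:
+            pass
+
+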
+def pager(generator: t.Iterable[str], color: t.Optional[bool] = None) -> None:
+    """Decide what method to use for paging through text."""
+    stdout = _default_text_stdout()
+    if not isatty(sys.stdin) or not isatty(stdout):
+        return _nullpager(stdout, generator, color)
+    pager_cmd = (os.environ.get("PAGER", None) or "").strip()
+    if pager_cmd:
+        if WIN:
+            return _tempfilepager(generator, pager_cmd, color)
+        return _pipepager(generator, pager_cmd, color)
+    if os.environ.get("TERM") in ("dumb", "emacs"):
+        return _nullpager(stdout, generator, color)
+    if WIN or sys.platform.startswith("os2"):
+        return _tempfilepager(generator, "more <", color)
+    if hasattr(os, "system") and os.system("(less) 2>/dev/null") == 0:
+        return _pipepager(generator, "less", color)
+
+    import tempfile
+
+    fd, filename = tempfile.mkstemp()
+    os.close(fd)
+    try:
+        if hasattr(os, "system") and os.system(f'more "{filename}"') == 0:
+            return _pipepager(generator, "more", color)
+        return _nullpager(stdout, generator, color)
+    finally:
+        os.unlink(filename)
+
+
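+# Illustrative sketch, not part of click: the selection logic above backs
+# the public ``click.echo_via_pager`` helper. ``_example_pager`` is a
+# hypothetical name.
+def _example_pager() -> None:
+    import click
+
+    # A lazy generator lets the pager start before all text is produced.
+    click.echo_via_pager(f"line {n}\n" for n in range(1000))
+
+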
+def _pipepager(generator: t.Iterable[str], cmd: str, color: t.Optional[bool]) -> None:
+    """Page through text by feeding it to another program.  Invoking a
+    pager through this might support colors.
+    """
+    import subprocess
+
+    env = dict(os.environ)
+
+    # If we're piping to less we might support colors under the
+    # condition that less is run with the -r or -R flag, which makes it
+    # pass raw ANSI control characters through to the terminal.
+    cmd_detail = cmd.rsplit("/", 1)[-1].split()
+    if color is None and cmd_detail[0] == "less":
+        less_flags = f"{os.environ.get('LESS', '')}{' '.join(cmd_detail[1:])}"
+        if not less_flags:
+            env["LESS"] = "-R"
+            color = True
+        elif "r" in less_flags or "R" in less_flags:
+            color = True
+
+    c = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, env=env)
+    stdin = t.cast(t.BinaryIO, c.stdin)
+    encoding = get_best_encoding(stdin)
+    try:
+        for text in generator:
+            if not color:
+                text = strip_ansi(text)
+
+            stdin.write(text.encode(encoding, "replace"))
+    except (OSError, KeyboardInterrupt):
+        pass
+    else:
+        stdin.close()
+
+    # Less doesn't respect ^C, but catches it for its own UI purposes (aborting
+    # search or other commands inside less).
+    #
+    # That means when the user hits ^C, the parent process (click) terminates,
+    # but less is still alive, paging the output and messing up the terminal.
+    #
+    # If the user wants to make the pager exit on ^C, they should set
+    # `LESS='-K'`. It's not our decision to make.
+    while True:
+        try:
+            c.wait()
+        except KeyboardInterrupt:
+            pass
+        else:
+            break
+
+
+def _tempfilepager(
+    generator: t.Iterable[str], cmd: str, color: t.Optional[bool]
+) -> None:
+    """Page through text by invoking a program on a temporary file."""
+    import tempfile
+
+    fd, filename = tempfile.mkstemp()
+    # TODO: This never terminates if the passed generator never terminates.
+    text = "".join(generator)
+    if not color:
+        text = strip_ansi(text)
+    encoding = get_best_encoding(sys.stdout)
+    with open_stream(filename, "wb")[0] as f:
+        f.write(text.encode(encoding))
+    try:
+        os.system(f'{cmd} "{filename}"')
+    finally:
+        os.close(fd)
+        os.unlink(filename)
+
+
+def _nullpager(
+    stream: t.TextIO, generator: t.Iterable[str], color: t.Optional[bool]
+) -> None:
+    """Simply print unformatted text.  This is the ultimate fallback."""
+    for text in generator:
+        if not color:
+            text = strip_ansi(text)
+        stream.write(text)
+
+
+class Editor:
+    def __init__(
+        self,
+        editor: t.Optional[str] = None,
+        env: t.Optional[t.Mapping[str, str]] = None,
+        require_save: bool = True,
+        extension: str = ".txt",
+    ) -> None:
+        self.editor = editor
+        self.env = env
+        self.require_save = require_save
+        self.extension = extension
+
+    def get_editor(self) -> str:
+        if self.editor is not None:
+            return self.editor
+        for key in "VISUAL", "EDITOR":
+            rv = os.environ.get(key)
+            if rv:
+                return rv
+        if WIN:
+            return "notepad"
+        for editor in "sensible-editor", "vim", "nano":
+            if os.system(f"which {editor} >/dev/null 2>&1") == 0:
+                return editor
+        return "vi"
+
+    def edit_file(self, filename: str) -> None:
+        import subprocess
+
+        editor = self.get_editor()
+        environ: t.Optional[t.Dict[str, str]] = None
+
+        if self.env:
+            environ = os.environ.copy()
+            environ.update(self.env)
+
+        try:
+            c = subprocess.Popen(f'{editor} "{filename}"', env=environ, shell=True)
+            exit_code = c.wait()
+            if exit_code != 0:
+                raise ClickException(
+                    _("{editor}: Editing failed").format(editor=editor)
+                )
+        except OSError as e:
+            raise ClickException(
+                _("{editor}: Editing failed: {e}").format(editor=editor, e=e)
+            ) from e
+
+    def edit(self, text: t.Optional[t.AnyStr]) -> t.Optional[t.AnyStr]:
+        import tempfile
+
+        if not text:
+            data = b""
+        elif isinstance(text, (bytes, bytearray)):
+            data = text
+        else:
+            if text and not text.endswith("\n"):
+                text += "\n"
+
+            if WIN:
+                data = text.replace("\n", "\r\n").encode("utf-8-sig")
+            else:
+                data = text.encode("utf-8")
+
+        fd, name = tempfile.mkstemp(prefix="editor-", suffix=self.extension)
+        f: t.BinaryIO
+
+        try:
+            with os.fdopen(fd, "wb") as f:
+                f.write(data)
+
+            # If the filesystem resolution is 1 second, like Mac OS
+            # 10.12 Extended, or 2 seconds, like FAT32, and the editor
+            # closes very fast, require_save can fail. Set the modified
+            # time to be 2 seconds in the past to work around this.
+            os.utime(name, (os.path.getatime(name), os.path.getmtime(name) - 2))
+            # Depending on the resolution, the exact value might not be
+            # recorded, so get the new recorded value.
+            timestamp = os.path.getmtime(name)
+
+            self.edit_file(name)
+
+            if self.require_save and os.path.getmtime(name) == timestamp:
+                return None
+
+            with open(name, "rb") as f:
+                rv = f.read()
+
+            if isinstance(text, (bytes, bytearray)):
+                return rv
+
+            return rv.decode("utf-8-sig").replace("\r\n", "\n")  # type: ignore
+        finally:
+            os.unlink(name)
+
+
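+# Illustrative sketch, not part of click: Editor backs the public
+# ``click.edit`` helper, which returns None when the user quits without
+# saving (with require_save=True). ``_example_edit`` is a hypothetical name.
+def _example_edit() -> None:
+    import click
+
+    message = click.edit("# Describe your change\n", extension=".md")
+    if message is not None:
+        click.echo(message)
+
+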
+def open_url(url: str, wait: bool = False, locate: bool = False) -> int:
+    import subprocess
+
+    def _unquote_file(url: str) -> str:
+        from urllib.parse import unquote
+
+        if url.startswith("file://"):
+            url = unquote(url[7:])
+
+        return url
+
+    if sys.platform == "darwin":
+        args = ["open"]
+        if wait:
+            args.append("-W")
+        if locate:
+            args.append("-R")
+        args.append(_unquote_file(url))
+        null = open("/dev/null", "w")
+        try:
+            return subprocess.Popen(args, stderr=null).wait()
+        finally:
+            null.close()
+    elif WIN:
+        if locate:
+            url = _unquote_file(url.replace('"', ""))
+            args = f'explorer /select,"{url}"'
+        else:
+            url = url.replace('"', "")
+            wait_str = "/WAIT" if wait else ""
+            args = f'start {wait_str} "" "{url}"'
+        return os.system(args)
+    elif CYGWIN:
+        if locate:
+            url = os.path.dirname(_unquote_file(url).replace('"', ""))
+            args = f'cygstart "{url}"'
+        else:
+            url = url.replace('"', "")
+            wait_str = "-w" if wait else ""
+            args = f'cygstart {wait_str} "{url}"'
+        return os.system(args)
+
+    try:
+        if locate:
+            url = os.path.dirname(_unquote_file(url)) or "."
+        else:
+            url = _unquote_file(url)
+        c = subprocess.Popen(["xdg-open", url])
+        if wait:
+            return c.wait()
+        return 0
+    except OSError:
+        if url.startswith(("http://", "https://")) and not locate and not wait:
+            import webbrowser
+
+            webbrowser.open(url)
+            return 0
+        return 1
+
+
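+# Illustrative sketch, not part of click: open_url backs the public
+# ``click.launch`` helper. ``_example_launch`` is a hypothetical name.
+def _example_launch() -> None:
+    import click
+
+    # Opens the URL with the default application; locate=True would open
+    # a file manager with the target selected instead.
+    click.launch("https://example.com")
+
+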
+def _translate_ch_to_exc(ch: str) -> t.Optional[BaseException]:
+    if ch == "\x03":
+        raise KeyboardInterrupt()
+
+    if ch == "\x04" and not WIN:  # Unix-like, Ctrl+D
+        raise EOFError()
+
+    if ch == "\x1a" and WIN:  # Windows, Ctrl+Z
+        raise EOFError()
+
+    return None
+
+
+if WIN:
+    import msvcrt
+
+    @contextlib.contextmanager
+    def raw_terminal() -> t.Iterator[int]:
+        yield -1
+
+    def getchar(echo: bool) -> str:
+        # The function `getch` will return a bytes object corresponding to
+        # the pressed character. Since Windows 10 build 1803, it will also
+        # return \x00 when called a second time after pressing a regular key.
+        #
+        # `getwch` does not share this probably-bugged behavior. Moreover, it
+        # returns a Unicode object by default, which is what we want.
+        #
+        # Either of these functions will return \x00 or \xe0 to indicate
+        # a special key, and you need to call the same function again to get
+        # the "rest" of the code. The fun part is that \u00e0 is
+        # "latin small letter a with grave", so if you type that on a French
+        # keyboard, you _also_ get a \xe0.
+        # E.g., consider the Up arrow. This returns \xe0 and then \x48. The
+        # resulting Unicode string reads as "a with grave" + "capital H".
+        # This is indistinguishable from when the user actually types
+        # "a with grave" and then "capital H".
+        #
+        # When \xe0 is returned, we assume it's part of a special-key sequence
+        # and call `getwch` again, but that means that when the user types
+        # the \u00e0 character, `getchar` doesn't return until a second
+        # character is typed.
+        # The alternative is returning immediately, but that would mess up
+        # cross-platform handling of arrow keys and others that start with
+        # \xe0. Another option is using `getch`, but then we can't reliably
+        # read non-ASCII characters, because return values of `getch` are
+        # limited to the current 8-bit codepage.
+        #
+        # Anyway, Click doesn't claim to do this Right(tm), and using `getwch`
+        # is doing the right thing in more situations than with `getch`.
+        func: t.Callable[[], str]
+
+        if echo:
+            func = msvcrt.getwche  # type: ignore
+        else:
+            func = msvcrt.getwch  # type: ignore
+
+        rv = func()
+
+        if rv in ("\x00", "\xe0"):
+            # \x00 and \xe0 are control characters that indicate a special
+            # key; see above.
+            rv += func()
+
+        _translate_ch_to_exc(rv)
+        return rv
+
+else:
+    import tty
+    import termios
+
+    @contextlib.contextmanager
+    def raw_terminal() -> t.Iterator[int]:
+        f: t.Optional[t.TextIO]
+        fd: int
+
+        if not isatty(sys.stdin):
+            f = open("/dev/tty")
+            fd = f.fileno()
+        else:
+            fd = sys.stdin.fileno()
+            f = None
+
+        try:
+            old_settings = termios.tcgetattr(fd)
+
+            try:
+                tty.setraw(fd)
+                yield fd
+            finally:
+                termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
+                sys.stdout.flush()
+
+                if f is not None:
+                    f.close()
+        except termios.error:
+            pass
+
+    def getchar(echo: bool) -> str:
+        with raw_terminal() as fd:
+            ch = os.read(fd, 32).decode(get_best_encoding(sys.stdin), "replace")
+
+            if echo and isatty(sys.stdout):
+                sys.stdout.write(ch)
+
+            _translate_ch_to_exc(ch)
+            return ch
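+
+
+# Illustrative sketch, not part of click: both getchar implementations back
+# the public ``click.getchar`` helper. ``_example_getchar`` is a
+# hypothetical name.
+def _example_getchar() -> None:
+    import click
+
+    click.echo("Press any key to continue...")
+    # Raises KeyboardInterrupt on Ctrl+C and EOFError on Ctrl+D (or Ctrl+Z
+    # on Windows), via _translate_ch_to_exc above.
+    click.getchar()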
diff --git a/venv/lib/python3.9/site-packages/click/_textwrap.py b/venv/lib/python3.9/site-packages/click/_textwrap.py
new file mode 100644
index 0000000..b47dcbd
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click/_textwrap.py
@@ -0,0 +1,49 @@
+import textwrap
+import typing as t
+from contextlib import contextmanager
+
+
+class TextWrapper(textwrap.TextWrapper):
+    def _handle_long_word(
+        self,
+        reversed_chunks: t.List[str],
+        cur_line: t.List[str],
+        cur_len: int,
+        width: int,
+    ) -> None:
+        space_left = max(width - cur_len, 1)
+
+        if self.break_long_words:
+            last = reversed_chunks[-1]
+            cut = last[:space_left]
+            res = last[space_left:]
+            cur_line.append(cut)
+            reversed_chunks[-1] = res
+        elif not cur_line:
+            cur_line.append(reversed_chunks.pop())
+
+    @contextmanager
+    def extra_indent(self, indent: str) -> t.Iterator[None]:
+        old_initial_indent = self.initial_indent
+        old_subsequent_indent = self.subsequent_indent
+        self.initial_indent += indent
+        self.subsequent_indent += indent
+
+        try:
+            yield
+        finally:
+            self.initial_indent = old_initial_indent
+            self.subsequent_indent = old_subsequent_indent
+
+    def indent_only(self, text: str) -> str:
+        rv = []
+
+        for idx, line in enumerate(text.splitlines()):
+            indent = self.initial_indent
+
+            if idx > 0:
+                indent = self.subsequent_indent
+
+            rv.append(f"{indent}{line}")
+
+        return "\n".join(rv)
diff --git a/venv/lib/python3.9/site-packages/click/_winconsole.py b/venv/lib/python3.9/site-packages/click/_winconsole.py
new file mode 100644
index 0000000..6b20df3
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click/_winconsole.py
@@ -0,0 +1,279 @@
+# This module is based on the excellent work by Adam Bartoš who
+# provided a lot of what went into the implementation here in
+# the discussion to issue1602 in the Python bug tracker.
+#
+# There are some general differences in regards to how this works
+# compared to the original patches as we do not need to patch
+# the entire interpreter but just work in our little world of
+# echo and prompt.
+import io
+import sys
+import time
+import typing as t
+from ctypes import byref
+from ctypes import c_char
+from ctypes import c_char_p
+from ctypes import c_int
+from ctypes import c_ssize_t
+from ctypes import c_ulong
+from ctypes import c_void_p
+from ctypes import POINTER
+from ctypes import py_object
+from ctypes import Structure
+from ctypes.wintypes import DWORD
+from ctypes.wintypes import HANDLE
+from ctypes.wintypes import LPCWSTR
+from ctypes.wintypes import LPWSTR
+
+from ._compat import _NonClosingTextIOWrapper
+
+assert sys.platform == "win32"
+import msvcrt  # noqa: E402
+from ctypes import windll  # noqa: E402
+from ctypes import WINFUNCTYPE  # noqa: E402
+
+c_ssize_p = POINTER(c_ssize_t)
+
+kernel32 = windll.kernel32
+GetStdHandle = kernel32.GetStdHandle
+ReadConsoleW = kernel32.ReadConsoleW
+WriteConsoleW = kernel32.WriteConsoleW
+GetConsoleMode = kernel32.GetConsoleMode
+GetLastError = kernel32.GetLastError
+GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW", windll.kernel32))
+CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))(
+    ("CommandLineToArgvW", windll.shell32)
+)
+LocalFree = WINFUNCTYPE(c_void_p, c_void_p)(("LocalFree", windll.kernel32))
+
+STDIN_HANDLE = GetStdHandle(-10)
+STDOUT_HANDLE = GetStdHandle(-11)
+STDERR_HANDLE = GetStdHandle(-12)
+
+PyBUF_SIMPLE = 0
+PyBUF_WRITABLE = 1
+
+ERROR_SUCCESS = 0
+ERROR_NOT_ENOUGH_MEMORY = 8
+ERROR_OPERATION_ABORTED = 995
+
+STDIN_FILENO = 0
+STDOUT_FILENO = 1
+STDERR_FILENO = 2
+
+EOF = b"\x1a"
+MAX_BYTES_WRITTEN = 32767
+
+try:
+    from ctypes import pythonapi
+except ImportError:
+    # On PyPy we cannot get buffers so our ability to operate here is
+    # severely limited.
+    get_buffer = None
+else:
+
+    class Py_buffer(Structure):
+        _fields_ = [
+            ("buf", c_void_p),
+            ("obj", py_object),
+            ("len", c_ssize_t),
+            ("itemsize", c_ssize_t),
+            ("readonly", c_int),
+            ("ndim", c_int),
+            ("format", c_char_p),
+            ("shape", c_ssize_p),
+            ("strides", c_ssize_p),
+            ("suboffsets", c_ssize_p),
+            ("internal", c_void_p),
+        ]
+
+    PyObject_GetBuffer = pythonapi.PyObject_GetBuffer
+    PyBuffer_Release = pythonapi.PyBuffer_Release
+
+    def get_buffer(obj, writable=False):
+        buf = Py_buffer()
+        flags = PyBUF_WRITABLE if writable else PyBUF_SIMPLE
+        PyObject_GetBuffer(py_object(obj), byref(buf), flags)
+
+        try:
+            buffer_type = c_char * buf.len
+            return buffer_type.from_address(buf.buf)
+        finally:
+            PyBuffer_Release(byref(buf))
+
+
+class _WindowsConsoleRawIOBase(io.RawIOBase):
+    def __init__(self, handle):
+        self.handle = handle
+
+    def isatty(self):
+        # IOBase.isatty() raises ValueError if the stream is closed;
+        # otherwise the console handle is always reported as a tty.
+        super().isatty()
+        return True
+
+
+class _WindowsConsoleReader(_WindowsConsoleRawIOBase):
+    def readable(self):
+        return True
+
+    def readinto(self, b):
+        bytes_to_be_read = len(b)
+        if not bytes_to_be_read:
+            return 0
+        elif bytes_to_be_read % 2:
+            raise ValueError(
+                "cannot read odd number of bytes from UTF-16-LE encoded console"
+            )
+
+        buffer = get_buffer(b, writable=True)
+        code_units_to_be_read = bytes_to_be_read // 2
+        code_units_read = c_ulong()
+
+        rv = ReadConsoleW(
+            HANDLE(self.handle),
+            buffer,
+            code_units_to_be_read,
+            byref(code_units_read),
+            None,
+        )
+        if GetLastError() == ERROR_OPERATION_ABORTED:
+            # wait for KeyboardInterrupt
+            time.sleep(0.1)
+        if not rv:
+            raise OSError(f"Windows error: {GetLastError()}")
+
+        if buffer[0] == EOF:
+            return 0
+        return 2 * code_units_read.value
+
+
+class _WindowsConsoleWriter(_WindowsConsoleRawIOBase):
+    def writable(self):
+        return True
+
+    @staticmethod
+    def _get_error_message(errno):
+        if errno == ERROR_SUCCESS:
+            return "ERROR_SUCCESS"
+        elif errno == ERROR_NOT_ENOUGH_MEMORY:
+            return "ERROR_NOT_ENOUGH_MEMORY"
+        return f"Windows error {errno}"
+
+    def write(self, b):
+        bytes_to_be_written = len(b)
+        buf = get_buffer(b)
+        code_units_to_be_written = min(bytes_to_be_written, MAX_BYTES_WRITTEN) // 2
+        code_units_written = c_ulong()
+
+        WriteConsoleW(
+            HANDLE(self.handle),
+            buf,
+            code_units_to_be_written,
+            byref(code_units_written),
+            None,
+        )
+        bytes_written = 2 * code_units_written.value
+
+        if bytes_written == 0 and bytes_to_be_written > 0:
+            raise OSError(self._get_error_message(GetLastError()))
+        return bytes_written
+
+
+class ConsoleStream:
+    def __init__(self, text_stream: t.TextIO, byte_stream: t.BinaryIO) -> None:
+        self._text_stream = text_stream
+        self.buffer = byte_stream
+
+    @property
+    def name(self) -> str:
+        return self.buffer.name
+
+    def write(self, x: t.AnyStr) -> int:
+        if isinstance(x, str):
+            return self._text_stream.write(x)
+        try:
+            self.flush()
+        except Exception:
+            pass
+        return self.buffer.write(x)
+
+    def writelines(self, lines: t.Iterable[t.AnyStr]) -> None:
+        for line in lines:
+            self.write(line)
+
+    def __getattr__(self, name: str) -> t.Any:
+        return getattr(self._text_stream, name)
+
+    def isatty(self) -> bool:
+        return self.buffer.isatty()
+
+    def __repr__(self):
+        return f"<ConsoleStream name={self.name!r} encoding={self.encoding!r}>"
+
+
+def _get_text_stdin(buffer_stream: t.BinaryIO) -> t.TextIO:
+    text_stream = _NonClosingTextIOWrapper(
+        io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)),
+        "utf-16-le",
+        "strict",
+        line_buffering=True,
+    )
+    return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream))
+
+
+def _get_text_stdout(buffer_stream: t.BinaryIO) -> t.TextIO:
+    text_stream = _NonClosingTextIOWrapper(
+        io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)),
+        "utf-16-le",
+        "strict",
+        line_buffering=True,
+    )
+    return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream))
+
+
+def _get_text_stderr(buffer_stream: t.BinaryIO) -> t.TextIO:
+    text_stream = _NonClosingTextIOWrapper(
+        io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)),
+        "utf-16-le",
+        "strict",
+        line_buffering=True,
+    )
+    return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream))
+
+
+_stream_factories: t.Mapping[int, t.Callable[[t.BinaryIO], t.TextIO]] = {
+    0: _get_text_stdin,
+    1: _get_text_stdout,
+    2: _get_text_stderr,
+}
+
+
+def _is_console(f: t.TextIO) -> bool:
+    if not hasattr(f, "fileno"):
+        return False
+
+    try:
+        fileno = f.fileno()
+    except (OSError, io.UnsupportedOperation):
+        return False
+
+    handle = msvcrt.get_osfhandle(fileno)
+    return bool(GetConsoleMode(handle, byref(DWORD())))
+
+
+def _get_windows_console_stream(
+    f: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str]
+) -> t.Optional[t.TextIO]:
+    if (
+        get_buffer is not None
+        and encoding in {"utf-16-le", None}
+        and errors in {"strict", None}
+        and _is_console(f)
+    ):
+        func = _stream_factories.get(f.fileno())
+        if func is not None:
+            b = getattr(f, "buffer", None)
+
+            if b is None:
+                return None
+
+            return func(b)
diff --git a/venv/lib/python3.9/site-packages/click/core.py b/venv/lib/python3.9/site-packages/click/core.py
new file mode 100644
index 0000000..5abfb0f
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click/core.py
@@ -0,0 +1,2998 @@
+import enum
+import errno
+import inspect
+import os
+import sys
+import typing as t
+from collections import abc
+from contextlib import contextmanager
+from contextlib import ExitStack
+from functools import partial
+from functools import update_wrapper
+from gettext import gettext as _
+from gettext import ngettext
+from itertools import repeat
+
+from . import types
+from .exceptions import Abort
+from .exceptions import BadParameter
+from .exceptions import ClickException
+from .exceptions import Exit
+from .exceptions import MissingParameter
+from .exceptions import UsageError
+from .formatting import HelpFormatter
+from .formatting import join_options
+from .globals import pop_context
+from .globals import push_context
+from .parser import _flag_needs_value
+from .parser import OptionParser
+from .parser import split_opt
+from .termui import confirm
+from .termui import prompt
+from .termui import style
+from .utils import _detect_program_name
+from .utils import _expand_args
+from .utils import echo
+from .utils import make_default_short_help
+from .utils import make_str
+from .utils import PacifyFlushWrapper
+
+if t.TYPE_CHECKING:
+    import typing_extensions as te
+    from .shell_completion import CompletionItem
+
+F = t.TypeVar("F", bound=t.Callable[..., t.Any])
+V = t.TypeVar("V")
+
+
+def _complete_visible_commands(
+    ctx: "Context", incomplete: str
+) -> t.Iterator[t.Tuple[str, "Command"]]:
+    """List all the subcommands of a group that start with the
+    incomplete value and aren't hidden.
+
+    :param ctx: Invocation context for the group.
+    :param incomplete: Value being completed. May be empty.
+    """
+    multi = t.cast(MultiCommand, ctx.command)
+
+    for name in multi.list_commands(ctx):
+        if name.startswith(incomplete):
+            command = multi.get_command(ctx, name)
+
+            if command is not None and not command.hidden:
+                yield name, command
+
+
+def _check_multicommand(
+    base_command: "MultiCommand", cmd_name: str, cmd: "Command", register: bool = False
+) -> None:
+    if not base_command.chain or not isinstance(cmd, MultiCommand):
+        return
+    if register:
+        hint = (
+            "It is not possible to add multi commands as children to"
+            " another multi command that is in chain mode."
+        )
+    else:
+        hint = (
+            "Found a multi command as subcommand to a multi command"
+            " that is in chain mode. This is not supported."
+        )
+    raise RuntimeError(
+        f"{hint}. Command {base_command.name!r} is set to chain and"
+        f" {cmd_name!r} was added as a subcommand but it in itself is a"
+        f" multi command. ({cmd_name!r} is a {type(cmd).__name__}"
+        f" within a chained {type(base_command).__name__} named"
+        f" {base_command.name!r})."
+    )
+
+
+def batch(iterable: t.Iterable[V], batch_size: int) -> t.List[t.Tuple[V, ...]]:
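+    # Groups the iterable into tuples of ``batch_size``; any trailing
+    # partial batch is dropped by ``zip``. Illustrative:
+    # batch([1, 2, 3, 4, 5], 2) -> [(1, 2), (3, 4)].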
+    return list(zip(*repeat(iter(iterable), batch_size)))
+
+
+@contextmanager
+def augment_usage_errors(
+    ctx: "Context", param: t.Optional["Parameter"] = None
+) -> t.Iterator[None]:
+    """Context manager that attaches extra information to exceptions."""
+    try:
+        yield
+    except BadParameter as e:
+        if e.ctx is None:
+            e.ctx = ctx
+        if param is not None and e.param is None:
+            e.param = param
+        raise
+    except UsageError as e:
+        if e.ctx is None:
+            e.ctx = ctx
+        raise
+
+
+def iter_params_for_processing(
+    invocation_order: t.Sequence["Parameter"],
+    declaration_order: t.Sequence["Parameter"],
+) -> t.List["Parameter"]:
+    """Given a sequence of parameters in the order as should be considered
+    for processing and an iterable of parameters that exist, this returns
+    a list in the correct order as they should be processed.
+    """
+
+    def sort_key(item: "Parameter") -> t.Tuple[bool, float]:
+        try:
+            idx: float = invocation_order.index(item)
+        except ValueError:
+            idx = float("inf")
+
+        return not item.is_eager, idx
+
+    return sorted(declaration_order, key=sort_key)
+
+
+class ParameterSource(enum.Enum):
+    """This is an :class:`~enum.Enum` that indicates the source of a
+    parameter's value.
+
+    Use :meth:`click.Context.get_parameter_source` to get the
+    source for a parameter by name.
+
+    .. versionchanged:: 8.0
+        Use :class:`~enum.Enum` and drop the ``validate`` method.
+
+    .. versionchanged:: 8.0
+        Added the ``PROMPT`` value.
+    """
+
+    COMMANDLINE = enum.auto()
+    """The value was provided by the command line args."""
+    ENVIRONMENT = enum.auto()
+    """The value was provided with an environment variable."""
+    DEFAULT = enum.auto()
+    """Used the default specified by the parameter."""
+    DEFAULT_MAP = enum.auto()
+    """Used a default provided by :attr:`Context.default_map`."""
+    PROMPT = enum.auto()
+    """Used a prompt to confirm a default or provide a value."""
+
+
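+# Illustrative sketch, not part of click: a hypothetical command that checks
+# where the value of ``--debug`` came from. ``_example_parameter_source``
+# and ``cli`` are hypothetical names.
+def _example_parameter_source() -> None:
+    import click
+
+    @click.command()
+    @click.option("--debug/--no-debug", default=False)
+    @click.pass_context
+    def cli(ctx: "Context", debug: bool) -> None:
+        # With no command-line args the source is ParameterSource.DEFAULT.
+        click.echo(f"--debug came from {ctx.get_parameter_source('debug')}")
+
+    cli([], standalone_mode=False)
+
+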
+class Context:
+    """The context is a special internal object that holds state relevant
+    for the script execution at every single level.  It's normally invisible
+    to commands unless they opt-in to getting access to it.
+
+    The context is useful as it can pass internal objects around and can
+    control special execution features such as reading data from
+    environment variables.
+
+    A context can be used as a context manager in which case it will call
+    :meth:`close` on teardown.
+
+    :param command: the command class for this context.
+    :param parent: the parent context.
+    :param info_name: the info name for this invocation.  Generally this
+                      is the most descriptive name for the script or
+                      command.  For the toplevel script it is usually
+                      the name of the script, for commands below it it's
+                      the name of the command.
+    :param obj: an arbitrary object of user data.
+    :param auto_envvar_prefix: the prefix to use for automatic environment
+                               variables.  If this is `None` then reading
+                               from environment variables is disabled.  This
+                               does not affect manually set environment
+                               variables which are always read.
+    :param default_map: a dictionary (like object) with default values
+                        for parameters.
+    :param terminal_width: the width of the terminal.  The default is to
+                           inherit from the parent context.  If no context
+                           defines the terminal width then it is
+                           auto-detected.
+    :param max_content_width: the maximum width for content rendered by
+                              Click (this currently only affects help
+                              pages).  This defaults to 80 characters if
+                              not overridden.  In other words: even if the
+                              terminal is larger than that, Click will not
+                              format things wider than 80 characters by
+                              default.  In addition to that, formatters might
+                              add some safety margin on the right.
+    :param resilient_parsing: if this flag is enabled then Click will
+                              parse without any interactivity or callback
+                              invocation.  Default values will also be
+                              ignored.  This is useful for implementing
+                              things such as completion support.
+    :param allow_extra_args: if this is set to `True` then extra arguments
+                             at the end will not raise an error and will be
+                             kept on the context.  The default is to inherit
+                             from the command.
+    :param allow_interspersed_args: if this is set to `False` then options
+                                    and arguments cannot be mixed.  The
+                                    default is to inherit from the command.
+    :param ignore_unknown_options: instructs click to ignore options it does
+                                   not know and keep them for later
+                                   processing.
+    :param help_option_names: optionally a list of strings that define how
+                              the default help parameter is named.  The
+                              default is ``['--help']``.
+    :param token_normalize_func: an optional function that is used to
+                                 normalize tokens (options, choices,
+                                 etc.).  This for instance can be used to
+                                 implement case insensitive behavior.
+    :param color: controls if the terminal supports ANSI colors or not.  The
+                  default is autodetection.  This is only needed if ANSI
+                  codes are used in texts that Click prints which is by
+                  default not the case.  This for instance would affect
+                  help output.
+    :param show_default: Show the default value for commands. If this
+        value is not set, it defaults to the value from the parent
+        context. ``Command.show_default`` overrides this default for the
+        specific command.
+
+    .. versionchanged:: 8.1
+        The ``show_default`` parameter is overridden by
+        ``Command.show_default``, instead of the other way around.
+
+    .. versionchanged:: 8.0
+        The ``show_default`` parameter defaults to the value from the
+        parent context.
+
+    .. versionchanged:: 7.1
+       Added the ``show_default`` parameter.
+
+    .. versionchanged:: 4.0
+        Added the ``color``, ``ignore_unknown_options``, and
+        ``max_content_width`` parameters.
+
+    .. versionchanged:: 3.0
+        Added the ``allow_extra_args`` and ``allow_interspersed_args``
+        parameters.
+
+    .. versionchanged:: 2.0
+        Added the ``resilient_parsing``, ``help_option_names``, and
+        ``token_normalize_func`` parameters.
+    """
+
+    #: The formatter class to create with :meth:`make_formatter`.
+    #:
+    #: .. versionadded:: 8.0
+    formatter_class: t.Type["HelpFormatter"] = HelpFormatter
+
+    def __init__(
+        self,
+        command: "Command",
+        parent: t.Optional["Context"] = None,
+        info_name: t.Optional[str] = None,
+        obj: t.Optional[t.Any] = None,
+        auto_envvar_prefix: t.Optional[str] = None,
+        default_map: t.Optional[t.Dict[str, t.Any]] = None,
+        terminal_width: t.Optional[int] = None,
+        max_content_width: t.Optional[int] = None,
+        resilient_parsing: bool = False,
+        allow_extra_args: t.Optional[bool] = None,
+        allow_interspersed_args: t.Optional[bool] = None,
+        ignore_unknown_options: t.Optional[bool] = None,
+        help_option_names: t.Optional[t.List[str]] = None,
+        token_normalize_func: t.Optional[t.Callable[[str], str]] = None,
+        color: t.Optional[bool] = None,
+        show_default: t.Optional[bool] = None,
+    ) -> None:
+        #: the parent context or `None` if none exists.
+        self.parent = parent
+        #: the :class:`Command` for this context.
+        self.command = command
+        #: the descriptive information name
+        self.info_name = info_name
+        #: Map of parameter names to their parsed values. Parameters
+        #: with ``expose_value=False`` are not stored.
+        self.params: t.Dict[str, t.Any] = {}
+        #: the leftover arguments.
+        self.args: t.List[str] = []
+        #: protected arguments.  These are arguments that are prepended
+        #: to `args` when certain parsing scenarios are encountered but
+        #: must never be propagated to other arguments.  This is used
+        #: to implement nested parsing.
+        self.protected_args: t.List[str] = []
+        #: the collected prefixes of the command's options.
+        self._opt_prefixes: t.Set[str] = set(parent._opt_prefixes) if parent else set()
+
+        if obj is None and parent is not None:
+            obj = parent.obj
+
+        #: the user object stored.
+        self.obj: t.Any = obj
+        self._meta: t.Dict[str, t.Any] = getattr(parent, "meta", {})
+
+        #: A dictionary (-like object) with defaults for parameters.
+        if (
+            default_map is None
+            and info_name is not None
+            and parent is not None
+            and parent.default_map is not None
+        ):
+            default_map = parent.default_map.get(info_name)
+
+        self.default_map: t.Optional[t.Dict[str, t.Any]] = default_map
+
+        #: This flag indicates if a subcommand is going to be executed. A
+        #: group callback can use this information to figure out if it's
+        #: being executed directly or because the execution flow passes
+        #: onwards to a subcommand. By default it's None, but it can be
+        #: the name of the subcommand to execute.
+        #:
+        #: If chaining is enabled this will be set to ``'*'`` in case
+        #: any commands are executed.  It is however not possible to
+        #: figure out which ones.  If you require this knowledge you
+        #: should use a :func:`result_callback`.
+        self.invoked_subcommand: t.Optional[str] = None
+
+        if terminal_width is None and parent is not None:
+            terminal_width = parent.terminal_width
+
+        #: The width of the terminal (None is autodetection).
+        self.terminal_width: t.Optional[int] = terminal_width
+
+        if max_content_width is None and parent is not None:
+            max_content_width = parent.max_content_width
+
+        #: The maximum width of formatted content (None implies a sensible
+        #: default which is 80 for most things).
+        self.max_content_width: t.Optional[int] = max_content_width
+
+        if allow_extra_args is None:
+            allow_extra_args = command.allow_extra_args
+
+        #: Indicates if the context allows extra args or if it should
+        #: fail on parsing.
+        #:
+        #: .. versionadded:: 3.0
+        self.allow_extra_args = allow_extra_args
+
+        if allow_interspersed_args is None:
+            allow_interspersed_args = command.allow_interspersed_args
+
+        #: Indicates if the context allows mixing of arguments and
+        #: options or not.
+        #:
+        #: .. versionadded:: 3.0
+        self.allow_interspersed_args: bool = allow_interspersed_args
+
+        if ignore_unknown_options is None:
+            ignore_unknown_options = command.ignore_unknown_options
+
+        #: Instructs click to ignore options that a command does not
+        #: understand and store them on the context for later
+        #: processing.  This is primarily useful for situations where you
+        #: want to call into external programs.  Generally this pattern is
+        #: strongly discouraged because it's not possible to losslessly
+        #: forward all arguments.
+        #:
+        #: .. versionadded:: 4.0
+        self.ignore_unknown_options: bool = ignore_unknown_options
+
+        if help_option_names is None:
+            if parent is not None:
+                help_option_names = parent.help_option_names
+            else:
+                help_option_names = ["--help"]
+
+        #: The names for the help options.
+        self.help_option_names: t.List[str] = help_option_names
+
+        if token_normalize_func is None and parent is not None:
+            token_normalize_func = parent.token_normalize_func
+
+        #: An optional normalization function for tokens.  This is
+        #: options, choices, commands etc.
+        self.token_normalize_func: t.Optional[
+            t.Callable[[str], str]
+        ] = token_normalize_func
+
+        #: Indicates if resilient parsing is enabled.  In that case Click
+        #: will do its best to not cause any failures and default values
+        #: will be ignored. Useful for completion.
+        self.resilient_parsing: bool = resilient_parsing
+
+        # If there is no envvar prefix yet, but the parent has one and
+        # the command on this level has a name, we can expand the envvar
+        # prefix automatically.
+        if auto_envvar_prefix is None:
+            if (
+                parent is not None
+                and parent.auto_envvar_prefix is not None
+                and self.info_name is not None
+            ):
+                auto_envvar_prefix = (
+                    f"{parent.auto_envvar_prefix}_{self.info_name.upper()}"
+                )
+        else:
+            auto_envvar_prefix = auto_envvar_prefix.upper()
+
+        if auto_envvar_prefix is not None:
+            auto_envvar_prefix = auto_envvar_prefix.replace("-", "_")
+
+        self.auto_envvar_prefix: t.Optional[str] = auto_envvar_prefix
+
+        if color is None and parent is not None:
+            color = parent.color
+
+        #: Controls if styling output is wanted or not.
+        self.color: t.Optional[bool] = color
+
+        if show_default is None and parent is not None:
+            show_default = parent.show_default
+
+        #: Show option default values when formatting help text.
+        self.show_default: t.Optional[bool] = show_default
+
+        self._close_callbacks: t.List[t.Callable[[], t.Any]] = []
+        self._depth = 0
+        self._parameter_source: t.Dict[str, ParameterSource] = {}
+        self._exit_stack = ExitStack()
+
+    def to_info_dict(self) -> t.Dict[str, t.Any]:
+        """Gather information that could be useful for a tool generating
+        user-facing documentation. This traverses the entire CLI
+        structure.
+
+        .. code-block:: python
+
+            with Context(cli) as ctx:
+                info = ctx.to_info_dict()
+
+        .. versionadded:: 8.0
+        """
+        return {
+            "command": self.command.to_info_dict(self),
+            "info_name": self.info_name,
+            "allow_extra_args": self.allow_extra_args,
+            "allow_interspersed_args": self.allow_interspersed_args,
+            "ignore_unknown_options": self.ignore_unknown_options,
+            "auto_envvar_prefix": self.auto_envvar_prefix,
+        }
+
+    def __enter__(self) -> "Context":
+        self._depth += 1
+        push_context(self)
+        return self
+
+    def __exit__(self, exc_type, exc_value, tb):  # type: ignore
+        self._depth -= 1
+        if self._depth == 0:
+            self.close()
+        pop_context()
+
+    @contextmanager
+    def scope(self, cleanup: bool = True) -> t.Iterator["Context"]:
+        """This helper method can be used with the context object to promote
+        it to the current thread local (see :func:`get_current_context`).
+        The default behavior of this is to invoke the cleanup functions which
+        can be disabled by setting `cleanup` to `False`.  The cleanup
+        functions are typically used for things such as closing file handles.
+
+        If the cleanup is intended the context object can also be directly
+        used as a context manager.
+
+        Example usage::
+
+            with ctx.scope():
+                assert get_current_context() is ctx
+
+        This is equivalent::
+
+            with ctx:
+                assert get_current_context() is ctx
+
+        .. versionadded:: 5.0
+
+        :param cleanup: controls if the cleanup functions should be run or
+                        not.  The default is to run these functions.  In
+                        some situations the context only wants to be
+                        temporarily pushed in which case this can be disabled.
+                        Nested pushes automatically defer the cleanup.
+        """
+        if not cleanup:
+            self._depth += 1
+        try:
+            with self as rv:
+                yield rv
+        finally:
+            if not cleanup:
+                self._depth -= 1
+
+    @property
+    def meta(self) -> t.Dict[str, t.Any]:
+        """This is a dictionary which is shared with all the contexts
+        that are nested.  It exists so that click utilities can store some
+        state here if they need to.  It is however the responsibility of
+        that code to manage this dictionary well.
+
+        The keys are supposed to be unique dotted strings.  For instance
+        module paths are a good choice for it.  What is stored in there is
+        irrelevant for the operation of click.  However what is important is
+        that code that places data here adheres to the general semantics of
+        the system.
+
+        Example usage::
+
+            LANG_KEY = f'{__name__}.lang'
+
+            def set_language(value):
+                ctx = get_current_context()
+                ctx.meta[LANG_KEY] = value
+
+            def get_language():
+                return get_current_context().meta.get(LANG_KEY, 'en_US')
+
+        .. versionadded:: 5.0
+        """
+        return self._meta
+
+    def make_formatter(self) -> HelpFormatter:
+        """Creates the :class:`~click.HelpFormatter` for the help and
+        usage output.
+
+        To quickly customize the formatter class used without overriding
+        this method, set the :attr:`formatter_class` attribute.
+
+        .. versionchanged:: 8.0
+            Added the :attr:`formatter_class` attribute.
+        """
+        return self.formatter_class(
+            width=self.terminal_width, max_width=self.max_content_width
+        )
+
+    def with_resource(self, context_manager: t.ContextManager[V]) -> V:
+        """Register a resource as if it were used in a ``with``
+        statement. The resource will be cleaned up when the context is
+        popped.
+
+        Uses :meth:`contextlib.ExitStack.enter_context`. It calls the
+        resource's ``__enter__()`` method and returns the result. When
+        the context is popped, it closes the stack, which calls the
+        resource's ``__exit__()`` method.
+
+        To register a cleanup function for something that isn't a
+        context manager, use :meth:`call_on_close`. Or use something
+        from :mod:`contextlib` to turn it into a context manager first.
+
+        .. code-block:: python
+
+            @click.group()
+            @click.option("--name")
+            @click.pass_context
+            def cli(ctx):
+                ctx.obj = ctx.with_resource(connect_db(name))
+
+        :param context_manager: The context manager to enter.
+        :return: Whatever ``context_manager.__enter__()`` returns.
+
+        .. versionadded:: 8.0
+        """
+        return self._exit_stack.enter_context(context_manager)
+
+    def call_on_close(self, f: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]:
+        """Register a function to be called when the context tears down.
+
+        This can be used to close resources opened during the script
+        execution. Resources that support Python's context manager
+        protocol which would be used in a ``with`` statement should be
+        registered with :meth:`with_resource` instead.
+
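+        A minimal sketch, assuming ``handle`` is a hypothetical object
+        with a ``close()`` method that is not a context manager::
+
+            @click.command()
+            @click.pass_context
+            def cli(ctx):
+                ctx.call_on_close(handle.close)
+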
+        :param f: The function to execute on teardown.
+        """
+        return self._exit_stack.callback(f)
+
+    def close(self) -> None:
+        """Invoke all close callbacks registered with
+        :meth:`call_on_close`, and exit all context managers entered
+        with :meth:`with_resource`.
+        """
+        self._exit_stack.close()
+        # In case the context is reused, create a new exit stack.
+        self._exit_stack = ExitStack()
+
+    @property
+    def command_path(self) -> str:
+        """The computed command path.  This is used for the ``usage``
+        information on the help page.  It's automatically created by
+        combining the info names of the chain of contexts to the root.
+        """
+        rv = ""
+        if self.info_name is not None:
+            rv = self.info_name
+        if self.parent is not None:
+            parent_command_path = [self.parent.command_path]
+
+            if isinstance(self.parent.command, Command):
+                for param in self.parent.command.get_params(self):
+                    parent_command_path.extend(param.get_usage_pieces(self))
+
+            rv = f"{' '.join(parent_command_path)} {rv}"
+        return rv.lstrip()
+
+    def find_root(self) -> "Context":
+        """Finds the outermost context."""
+        node = self
+        while node.parent is not None:
+            node = node.parent
+        return node
+
+    def find_object(self, object_type: t.Type[V]) -> t.Optional[V]:
+        """Finds the closest object of a given type."""
+        node: t.Optional["Context"] = self
+
+        while node is not None:
+            if isinstance(node.obj, object_type):
+                return node.obj
+
+            node = node.parent
+
+        return None
+
+    def ensure_object(self, object_type: t.Type[V]) -> V:
+        """Like :meth:`find_object` but sets the innermost object to a
+        new instance of `object_type` if it does not exist.
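+
+        A minimal sketch (``Repo`` is a hypothetical user-defined class)::
+
+            @click.group()
+            @click.pass_context
+            def cli(ctx):
+                # Create and store a Repo on the context if missing.
+                repo = ctx.ensure_object(Repo)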
+        """
+        rv = self.find_object(object_type)
+        if rv is None:
+            self.obj = rv = object_type()
+        return rv
+
+    @t.overload
+    def lookup_default(
+        self, name: str, call: "te.Literal[True]" = True
+    ) -> t.Optional[t.Any]:
+        ...
+
+    @t.overload
+    def lookup_default(
+        self, name: str, call: "te.Literal[False]" = ...
+    ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]:
+        ...
+
+    def lookup_default(self, name: str, call: bool = True) -> t.Optional[t.Any]:
+        """Get the default for a parameter from :attr:`default_map`.
+
+        :param name: Name of the parameter.
+        :param call: If the default is a callable, call it. Disable to
+            return the callable instead.
+
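+        A minimal sketch, assuming ``default_map`` is ``{"verbose": True}``::
+
+            ctx.lookup_default("verbose")  # -> True
+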
+        .. versionchanged:: 8.0
+            Added the ``call`` parameter.
+        """
+        if self.default_map is not None:
+            value = self.default_map.get(name)
+
+            if call and callable(value):
+                return value()
+
+            return value
+
+        return None
+
+    def fail(self, message: str) -> "te.NoReturn":
+        """Aborts the execution of the program with a specific error
+        message.
+
+        :param message: the error message to fail with.
+        """
+        raise UsageError(message, self)
+
+    def abort(self) -> "te.NoReturn":
+        """Aborts the script."""
+        raise Abort()
+
+    def exit(self, code: int = 0) -> "te.NoReturn":
+        """Exits the application with a given exit code."""
+        raise Exit(code)
+
+    def get_usage(self) -> str:
+        """Helper method to get formatted usage string for the current
+        context and command.
+        """
+        return self.command.get_usage(self)
+
+    def get_help(self) -> str:
+        """Helper method to get formatted help page for the current
+        context and command.
+        """
+        return self.command.get_help(self)
+
+    def _make_sub_context(self, command: "Command") -> "Context":
+        """Create a new context of the same type as this context, but
+        for a new command.
+
+        :meta private:
+        """
+        return type(self)(command, info_name=command.name, parent=self)
+
+    def invoke(
+        __self,  # noqa: B902
+        __callback: t.Union["Command", t.Callable[..., t.Any]],
+        *args: t.Any,
+        **kwargs: t.Any,
+    ) -> t.Any:
+        """Invokes a command callback in exactly the way it expects.  There
+        are two ways to invoke this method:
+
+        1.  the first argument can be a callback and all other arguments and
+            keyword arguments are forwarded directly to the function.
+        2.  the first argument is a click command object.  In that case all
+            arguments are forwarded as well but proper click parameters
+            (options and click arguments) must be keyword arguments and Click
+            will fill in defaults.
+
+        Note that before Click 3.2, keyword arguments were not properly
+        filled in (contrary to the intention of this code) and no context
+        was created.  For more information about this change and why it
+        was done in a bugfix release see :ref:`upgrade-to-3.2`.
+
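+        A minimal sketch of both forms (``other_cmd`` is a hypothetical
+        sibling command)::
+
+            @cli.command()
+            @click.pass_context
+            def repeat(ctx):
+                # 1. A plain callable: arguments are forwarded directly.
+                ctx.invoke(click.echo, "hello")
+                # 2. A command object: params must be keyword arguments.
+                ctx.invoke(other_cmd, count=2)
+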
+        .. versionchanged:: 8.0
+            All ``kwargs`` are tracked in :attr:`params` so they will be
+            passed if :meth:`forward` is called at multiple levels.
+        """
+        if isinstance(__callback, Command):
+            other_cmd = __callback
+
+            if other_cmd.callback is None:
+                raise TypeError(
+                    "The given command does not have a callback that can be invoked."
+                )
+            else:
+                __callback = other_cmd.callback
+
+            ctx = __self._make_sub_context(other_cmd)
+
+            for param in other_cmd.params:
+                if param.name not in kwargs and param.expose_value:
+                    kwargs[param.name] = param.type_cast_value(  # type: ignore
+                        ctx, param.get_default(ctx)
+                    )
+
+            # Track all kwargs as params, so that forward() will pass
+            # them on in subsequent calls.
+            ctx.params.update(kwargs)
+        else:
+            ctx = __self
+
+        with augment_usage_errors(__self):
+            with ctx:
+                return __callback(*args, **kwargs)
+
+    def forward(
+        __self, __cmd: "Command", *args: t.Any, **kwargs: t.Any  # noqa: B902
+    ) -> t.Any:
+        """Similar to :meth:`invoke` but fills in default keyword
+        arguments from the current context if the other command expects
+        it.  This cannot invoke callbacks directly, only other commands.
+
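+        A minimal sketch (``other`` is a hypothetical command that also
+        accepts ``--count``)::
+
+            @cli.command()
+            @click.option("--count", default=1)
+            @click.pass_context
+            def repeat(ctx, count):
+                # count is filled in from this context's params.
+                return ctx.forward(other)
+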
+        .. versionchanged:: 8.0
+            All ``kwargs`` are tracked in :attr:`params` so they will be
+            passed if ``forward`` is called at multiple levels.
+        """
+        # Can only forward to other commands, not direct callbacks.
+        if not isinstance(__cmd, Command):
+            raise TypeError("Callback is not a command.")
+
+        for param in __self.params:
+            if param not in kwargs:
+                kwargs[param] = __self.params[param]
+
+        return __self.invoke(__cmd, *args, **kwargs)
+
+    def set_parameter_source(self, name: str, source: ParameterSource) -> None:
+        """Set the source of a parameter. This indicates the location
+        from which the value of the parameter was obtained.
+
+        :param name: The name of the parameter.
+        :param source: A member of :class:`~click.core.ParameterSource`.
+        """
+        self._parameter_source[name] = source
+
+    def get_parameter_source(self, name: str) -> t.Optional[ParameterSource]:
+        """Get the source of a parameter. This indicates the location
+        from which the value of the parameter was obtained.
+
+        This can be useful for determining when a user specified a value
+        on the command line that is the same as the default value. It
+        will be :attr:`~click.core.ParameterSource.DEFAULT` only if the
+        value was actually taken from the default.
+
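+        A minimal sketch::
+
+            from click.core import ParameterSource
+
+            if ctx.get_parameter_source("name") is ParameterSource.DEFAULT:
+                click.echo("the default value for name was used")
+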
+        :param name: The name of the parameter.
+        :rtype: ParameterSource
+
+        .. versionchanged:: 8.0
+            Returns ``None`` if the parameter was not provided from any
+            source.
+        """
+        return self._parameter_source.get(name)
+
+
+class BaseCommand:
+    """The base command implements the minimal API contract of commands.
+    Most code will never use this directly, as it does not implement much
+    useful functionality on its own, but it can serve as the base class
+    for commands built on alternative parsing methods that do not depend
+    on the Click parser.
+
+    For instance, this can be used to bridge Click and other systems like
+    argparse or docopt.
+
+    Because base commands do not implement much of the API that other
+    parts of Click take for granted, they are not supported for all
+    operations.  For instance, they usually cannot be used with the
+    decorators, and they have no built-in callback system.
+
+    .. versionchanged:: 2.0
+       Added the `context_settings` parameter.
+
+    :param name: the name of the command to use unless a group overrides it.
+    :param context_settings: an optional dictionary with defaults that are
+                             passed to the context object.
+    """
+
+    #: The context class to create with :meth:`make_context`.
+    #:
+    #: .. versionadded:: 8.0
+    context_class: t.Type[Context] = Context
+    #: the default for the :attr:`Context.allow_extra_args` flag.
+    allow_extra_args = False
+    #: the default for the :attr:`Context.allow_interspersed_args` flag.
+    allow_interspersed_args = True
+    #: the default for the :attr:`Context.ignore_unknown_options` flag.
+    ignore_unknown_options = False
+
+    def __init__(
+        self,
+        name: t.Optional[str],
+        context_settings: t.Optional[t.Dict[str, t.Any]] = None,
+    ) -> None:
+        #: the name the command thinks it has.  Upon registering a command
+        #: on a :class:`Group` the group will default the command name to
+        #: this value.  You should instead use the
+        #: :class:`Context`\'s :attr:`~Context.info_name` attribute.
+        self.name = name
+
+        if context_settings is None:
+            context_settings = {}
+
+        #: an optional dictionary with defaults passed to the context.
+        self.context_settings: t.Dict[str, t.Any] = context_settings
+
+    def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]:
+        """Gather information that could be useful for a tool generating
+        user-facing documentation. This traverses the entire structure
+        below this command.
+
+        Use :meth:`click.Context.to_info_dict` to traverse the entire
+        CLI structure.
+
+        :param ctx: A :class:`Context` representing this command.
+
+        .. versionadded:: 8.0
+        """
+        return {"name": self.name}
+
+    def __repr__(self) -> str:
+        return f"<{self.__class__.__name__} {self.name}>"
+
+    def get_usage(self, ctx: Context) -> str:
+        raise NotImplementedError("Base commands cannot get usage")
+
+    def get_help(self, ctx: Context) -> str:
+        raise NotImplementedError("Base commands cannot get help")
+
+    def make_context(
+        self,
+        info_name: t.Optional[str],
+        args: t.List[str],
+        parent: t.Optional[Context] = None,
+        **extra: t.Any,
+    ) -> Context:
+        """This function when given an info name and arguments will kick
+        off the parsing and create a new :class:`Context`.  It does not
+        invoke the actual command callback though.
+
+        To quickly customize the context class used without overriding
+        this method, set the :attr:`context_class` attribute.
+
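+        A minimal sketch, assuming ``cli`` accepts a ``--name`` option
+        (only parsing happens; the callback is not invoked)::
+
+            with cli.make_context("cli", ["--name", "x"]) as ctx:
+                print(ctx.params)
+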
+        :param info_name: the info name for this invocation.  Generally this
+                          is the most descriptive name for the script or
+                          command.  For the top-level script it's usually
+                          the name of the script; for commands below it, it's
+                          the name of the command.
+        :param args: the arguments to parse as list of strings.
+        :param parent: the parent context if available.
+        :param extra: extra keyword arguments forwarded to the context
+                      constructor.
+
+        .. versionchanged:: 8.0
+            Added the :attr:`context_class` attribute.
+        """
+        for key, value in self.context_settings.items():
+            if key not in extra:
+                extra[key] = value
+
+        ctx = self.context_class(
+            self, info_name=info_name, parent=parent, **extra  # type: ignore
+        )
+
+        with ctx.scope(cleanup=False):
+            self.parse_args(ctx, args)
+        return ctx
+
+    def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]:
+        """Given a context and a list of arguments this creates the parser
+        and parses the arguments, then modifies the context as necessary.
+        This is automatically invoked by :meth:`make_context`.
+        """
+        raise NotImplementedError("Base commands do not know how to parse arguments.")
+
+    def invoke(self, ctx: Context) -> t.Any:
+        """Given a context, this invokes the command.  The default
+        implementation is raising a not implemented error.
+        """
+        raise NotImplementedError("Base commands are not invokable by default")
+
+    def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]:
+        """Return a list of completions for the incomplete value. Looks
+        at the names of chained multi-commands.
+
+        Any command could be part of a chained multi-command, so sibling
+        commands are valid at any point during command completion. Other
+        command classes will return more completions.
+
+        :param ctx: Invocation context for this command.
+        :param incomplete: Value being completed. May be empty.
+
+        .. versionadded:: 8.0
+        """
+        from click.shell_completion import CompletionItem
+
+        results: t.List["CompletionItem"] = []
+
+        while ctx.parent is not None:
+            ctx = ctx.parent
+
+            if isinstance(ctx.command, MultiCommand) and ctx.command.chain:
+                results.extend(
+                    CompletionItem(name, help=command.get_short_help_str())
+                    for name, command in _complete_visible_commands(ctx, incomplete)
+                    if name not in ctx.protected_args
+                )
+
+        return results
+
+    @t.overload
+    def main(
+        self,
+        args: t.Optional[t.Sequence[str]] = None,
+        prog_name: t.Optional[str] = None,
+        complete_var: t.Optional[str] = None,
+        standalone_mode: "te.Literal[True]" = True,
+        **extra: t.Any,
+    ) -> "te.NoReturn":
+        ...
+
+    @t.overload
+    def main(
+        self,
+        args: t.Optional[t.Sequence[str]] = None,
+        prog_name: t.Optional[str] = None,
+        complete_var: t.Optional[str] = None,
+        standalone_mode: bool = ...,
+        **extra: t.Any,
+    ) -> t.Any:
+        ...
+
+    def main(
+        self,
+        args: t.Optional[t.Sequence[str]] = None,
+        prog_name: t.Optional[str] = None,
+        complete_var: t.Optional[str] = None,
+        standalone_mode: bool = True,
+        windows_expand_args: bool = True,
+        **extra: t.Any,
+    ) -> t.Any:
+        """This is the way to invoke a script with all the bells and
+        whistles as a command line application.  This will always terminate
+        the application after a call.  If this is not wanted, ``SystemExit``
+        needs to be caught.
+
+        This method is also available by directly calling the instance of
+        a :class:`Command`.
+
+        :param args: the arguments that should be used for parsing.  If not
+                     provided, ``sys.argv[1:]`` is used.
+        :param prog_name: the program name that should be used.  By default
+                          the program name is constructed by taking the file
+                          name from ``sys.argv[0]``.
+        :param complete_var: the environment variable that controls the
+                             bash completion support.  The default is
+                             ``"_<prog_name>_COMPLETE"`` with prog_name in
+                             uppercase.
+        :param standalone_mode: the default behavior is to invoke the script
+                                in standalone mode.  Click will then
+                                handle exceptions and convert them into
+                                error messages and the function will never
+                                return but shut down the interpreter.  If
+                                this is set to `False` they will be
+                                propagated to the caller and the return
+                                value of this function is the return value
+                                of :meth:`invoke`.
+        :param windows_expand_args: Expand glob patterns, user dir, and
+            env vars in command line args on Windows.
+        :param extra: extra keyword arguments are forwarded to the context
+                      constructor.  See :class:`Context` for more information.
+
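+        A minimal sketch of non-standalone use, assuming ``cli`` accepts
+        a ``--name`` option; exceptions propagate and the return value of
+        :meth:`invoke` is handed back::
+
+            rv = cli.main(["--name", "x"], standalone_mode=False)
+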
+        .. versionchanged:: 8.0.1
+            Added the ``windows_expand_args`` parameter to allow
+            disabling command line arg expansion on Windows.
+
+        .. versionchanged:: 8.0
+            When taking arguments from ``sys.argv`` on Windows, glob
+            patterns, user dir, and env vars are expanded.
+
+        .. versionchanged:: 3.0
+           Added the ``standalone_mode`` parameter.
+        """
+        if args is None:
+            args = sys.argv[1:]
+
+            if os.name == "nt" and windows_expand_args:
+                args = _expand_args(args)
+        else:
+            args = list(args)
+
+        if prog_name is None:
+            prog_name = _detect_program_name()
+
+        # Process shell completion requests and exit early.
+        self._main_shell_completion(extra, prog_name, complete_var)
+
+        try:
+            try:
+                with self.make_context(prog_name, args, **extra) as ctx:
+                    rv = self.invoke(ctx)
+                    if not standalone_mode:
+                        return rv
+                    # it's not safe to `ctx.exit(rv)` here!
+                    # note that `rv` may actually contain data like "1" which
+                    # has obvious effects
+                    # more subtle case: `rv=[None, None]` can come out of
+                    # chained commands which all returned `None` -- so it's not
+                    # even always obvious that `rv` indicates success/failure
+                    # by its truthiness/falsiness
+                    ctx.exit()
+            except (EOFError, KeyboardInterrupt):
+                echo(file=sys.stderr)
+                raise Abort() from None
+            except ClickException as e:
+                if not standalone_mode:
+                    raise
+                e.show()
+                sys.exit(e.exit_code)
+            except OSError as e:
+                if e.errno == errno.EPIPE:
+                    sys.stdout = t.cast(t.TextIO, PacifyFlushWrapper(sys.stdout))
+                    sys.stderr = t.cast(t.TextIO, PacifyFlushWrapper(sys.stderr))
+                    sys.exit(1)
+                else:
+                    raise
+        except Exit as e:
+            if standalone_mode:
+                sys.exit(e.exit_code)
+            else:
+                # in non-standalone mode, return the exit code
+                # note that this is only reached if `self.invoke` above raises
+                # an Exit explicitly -- thus bypassing the check there which
+                # would return its result
+                # the results of non-standalone execution may therefore be
+                # somewhat ambiguous: if there are codepaths which lead to
+                # `ctx.exit(1)` and to `return 1`, the caller won't be able to
+                # tell the difference between the two
+                return e.exit_code
+        except Abort:
+            if not standalone_mode:
+                raise
+            echo(_("Aborted!"), file=sys.stderr)
+            sys.exit(1)
+
+    def _main_shell_completion(
+        self,
+        ctx_args: t.Dict[str, t.Any],
+        prog_name: str,
+        complete_var: t.Optional[str] = None,
+    ) -> None:
+        """Check if the shell is asking for tab completion, process
+        that, then exit early. Called from :meth:`main` before the
+        program is invoked.
+
+        :param prog_name: Name of the executable in the shell.
+        :param complete_var: Name of the environment variable that holds
+            the completion instruction. Defaults to
+            ``_{PROG_NAME}_COMPLETE``.
+        """
+        if complete_var is None:
+            complete_var = f"_{prog_name}_COMPLETE".replace("-", "_").upper()
+
+        instruction = os.environ.get(complete_var)
+
+        if not instruction:
+            return
+
+        from .shell_completion import shell_complete
+
+        rv = shell_complete(self, ctx_args, prog_name, complete_var, instruction)
+        sys.exit(rv)
+
+    def __call__(self, *args: t.Any, **kwargs: t.Any) -> t.Any:
+        """Alias for :meth:`main`."""
+        return self.main(*args, **kwargs)
+
+
+class Command(BaseCommand):
+    """Commands are the basic building block of command line interfaces in
+    Click.  A basic command handles command line parsing and might dispatch
+    more parsing to commands nested below it.
+
+    :param name: the name of the command to use unless a group overrides it.
+    :param context_settings: an optional dictionary with defaults that are
+                             passed to the context object.
+    :param callback: the callback to invoke.  This is optional.
+    :param params: the parameters to register with this command.  This can
+                   be either :class:`Option` or :class:`Argument` objects.
+    :param help: the help string to use for this command.
+    :param epilog: like the help string but it's printed at the end of the
+                   help page after everything else.
+    :param short_help: the short help to use for this command.  This is
+                       shown on the command listing of the parent command.
+    :param add_help_option: by default each command registers a ``--help``
+                            option.  This can be disabled by this parameter.
+    :param no_args_is_help: this controls what happens if no arguments are
+                            provided.  This option is disabled by default.
+                            If enabled, this will add ``--help`` as the
+                            argument if no arguments are passed.
+    :param hidden: hide this command from help outputs.
+    :param deprecated: issues a message indicating that the command is
+                       deprecated.
+
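+    A command is usually created through the :func:`click.command`
+    decorator; a minimal sketch of direct construction looks like::
+
+        cmd = Command("hello", callback=lambda: click.echo("hi"))
+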
+    .. versionchanged:: 8.1
+        ``help``, ``epilog``, and ``short_help`` are stored unprocessed,
+        all formatting is done when outputting help text, not at init,
+        and is done even if not using the ``@command`` decorator.
+
+    .. versionchanged:: 8.0
+        Added a ``repr`` showing the command name.
+
+    .. versionchanged:: 7.1
+        Added the ``no_args_is_help`` parameter.
+
+    .. versionchanged:: 2.0
+        Added the ``context_settings`` parameter.
+    """
+
+    def __init__(
+        self,
+        name: t.Optional[str],
+        context_settings: t.Optional[t.Dict[str, t.Any]] = None,
+        callback: t.Optional[t.Callable[..., t.Any]] = None,
+        params: t.Optional[t.List["Parameter"]] = None,
+        help: t.Optional[str] = None,
+        epilog: t.Optional[str] = None,
+        short_help: t.Optional[str] = None,
+        options_metavar: t.Optional[str] = "[OPTIONS]",
+        add_help_option: bool = True,
+        no_args_is_help: bool = False,
+        hidden: bool = False,
+        deprecated: bool = False,
+    ) -> None:
+        super().__init__(name, context_settings)
+        #: the callback to execute when the command fires.  This might be
+        #: `None` in which case nothing happens.
+        self.callback = callback
+        #: the list of parameters for this command in the order they
+        #: should show up in the help page and execute.  Eager parameters
+        #: will automatically be handled before non eager ones.
+        self.params: t.List["Parameter"] = params or []
+        self.help = help
+        self.epilog = epilog
+        self.options_metavar = options_metavar
+        self.short_help = short_help
+        self.add_help_option = add_help_option
+        self.no_args_is_help = no_args_is_help
+        self.hidden = hidden
+        self.deprecated = deprecated
+
+    def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]:
+        info_dict = super().to_info_dict(ctx)
+        info_dict.update(
+            params=[param.to_info_dict() for param in self.get_params(ctx)],
+            help=self.help,
+            epilog=self.epilog,
+            short_help=self.short_help,
+            hidden=self.hidden,
+            deprecated=self.deprecated,
+        )
+        return info_dict
+
+    def get_usage(self, ctx: Context) -> str:
+        """Formats the usage line into a string and returns it.
+
+        Calls :meth:`format_usage` internally.
+        """
+        formatter = ctx.make_formatter()
+        self.format_usage(ctx, formatter)
+        return formatter.getvalue().rstrip("\n")
+
+    def get_params(self, ctx: Context) -> t.List["Parameter"]:
+        rv = self.params
+        help_option = self.get_help_option(ctx)
+
+        if help_option is not None:
+            rv = [*rv, help_option]
+
+        return rv
+
+    def format_usage(self, ctx: Context, formatter: HelpFormatter) -> None:
+        """Writes the usage line into the formatter.
+
+        This is a low-level method called by :meth:`get_usage`.
+        """
+        pieces = self.collect_usage_pieces(ctx)
+        formatter.write_usage(ctx.command_path, " ".join(pieces))
+
+    def collect_usage_pieces(self, ctx: Context) -> t.List[str]:
+        """Returns all the pieces that go into the usage line and returns
+        it as a list of strings.
+        """
+        rv = [self.options_metavar] if self.options_metavar else []
+
+        for param in self.get_params(ctx):
+            rv.extend(param.get_usage_pieces(ctx))
+
+        return rv
+
+    def get_help_option_names(self, ctx: Context) -> t.List[str]:
+        """Returns the names for the help option."""
+        all_names = set(ctx.help_option_names)
+        for param in self.params:
+            all_names.difference_update(param.opts)
+            all_names.difference_update(param.secondary_opts)
+        return list(all_names)
+
+    def get_help_option(self, ctx: Context) -> t.Optional["Option"]:
+        """Returns the help option object."""
+        help_options = self.get_help_option_names(ctx)
+
+        if not help_options or not self.add_help_option:
+            return None
+
+        def show_help(ctx: Context, param: "Parameter", value: str) -> None:
+            if value and not ctx.resilient_parsing:
+                echo(ctx.get_help(), color=ctx.color)
+                ctx.exit()
+
+        return Option(
+            help_options,
+            is_flag=True,
+            is_eager=True,
+            expose_value=False,
+            callback=show_help,
+            help=_("Show this message and exit."),
+        )
+
+    def make_parser(self, ctx: Context) -> OptionParser:
+        """Creates the underlying option parser for this command."""
+        parser = OptionParser(ctx)
+        for param in self.get_params(ctx):
+            param.add_to_parser(parser, ctx)
+        return parser
+
+    def get_help(self, ctx: Context) -> str:
+        """Formats the help into a string and returns it.
+
+        Calls :meth:`format_help` internally.
+        """
+        formatter = ctx.make_formatter()
+        self.format_help(ctx, formatter)
+        return formatter.getvalue().rstrip("\n")
+
+    def get_short_help_str(self, limit: int = 45) -> str:
+        """Gets short help for the command or makes it by shortening the
+        long help string.
+        """
+        if self.short_help:
+            text = inspect.cleandoc(self.short_help)
+        elif self.help:
+            text = make_default_short_help(self.help, limit)
+        else:
+            text = ""
+
+        if self.deprecated:
+            text = _("(Deprecated) {text}").format(text=text)
+
+        return text.strip()
+
+    def format_help(self, ctx: Context, formatter: HelpFormatter) -> None:
+        """Writes the help into the formatter if it exists.
+
+        This is a low-level method called by :meth:`get_help`.
+
+        This calls the following methods:
+
+        -   :meth:`format_usage`
+        -   :meth:`format_help_text`
+        -   :meth:`format_options`
+        -   :meth:`format_epilog`
+        """
+        self.format_usage(ctx, formatter)
+        self.format_help_text(ctx, formatter)
+        self.format_options(ctx, formatter)
+        self.format_epilog(ctx, formatter)
+
+    def format_help_text(self, ctx: Context, formatter: HelpFormatter) -> None:
+        """Writes the help text to the formatter if it exists."""
+        text = self.help if self.help is not None else ""
+
+        if self.deprecated:
+            text = _("(Deprecated) {text}").format(text=text)
+
+        if text:
+            text = inspect.cleandoc(text).partition("\f")[0]
+            formatter.write_paragraph()
+
+            with formatter.indentation():
+                formatter.write_text(text)
+
+    def format_options(self, ctx: Context, formatter: HelpFormatter) -> None:
+        """Writes all the options into the formatter if they exist."""
+        opts = []
+        for param in self.get_params(ctx):
+            rv = param.get_help_record(ctx)
+            if rv is not None:
+                opts.append(rv)
+
+        if opts:
+            with formatter.section(_("Options")):
+                formatter.write_dl(opts)
+
+    def format_epilog(self, ctx: Context, formatter: HelpFormatter) -> None:
+        """Writes the epilog into the formatter if it exists."""
+        if self.epilog:
+            epilog = inspect.cleandoc(self.epilog)
+            formatter.write_paragraph()
+
+            with formatter.indentation():
+                formatter.write_text(epilog)
+
+    def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]:
+        if not args and self.no_args_is_help and not ctx.resilient_parsing:
+            echo(ctx.get_help(), color=ctx.color)
+            ctx.exit()
+
+        parser = self.make_parser(ctx)
+        opts, args, param_order = parser.parse_args(args=args)
+
+        for param in iter_params_for_processing(param_order, self.get_params(ctx)):
+            value, args = param.handle_parse_result(ctx, opts, args)
+
+        if args and not ctx.allow_extra_args and not ctx.resilient_parsing:
+            ctx.fail(
+                ngettext(
+                    "Got unexpected extra argument ({args})",
+                    "Got unexpected extra arguments ({args})",
+                    len(args),
+                ).format(args=" ".join(map(str, args)))
+            )
+
+        ctx.args = args
+        ctx._opt_prefixes.update(parser._opt_prefixes)
+        return args
+
+    def invoke(self, ctx: Context) -> t.Any:
+        """Given a context, this invokes the attached callback (if it exists)
+        in the right way.
+        """
+        if self.deprecated:
+            message = _(
+                "DeprecationWarning: The command {name!r} is deprecated."
+            ).format(name=self.name)
+            echo(style(message, fg="red"), err=True)
+
+        if self.callback is not None:
+            return ctx.invoke(self.callback, **ctx.params)
+
+    def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]:
+        """Return a list of completions for the incomplete value. Looks
+        at the names of options and chained multi-commands.
+
+        :param ctx: Invocation context for this command.
+        :param incomplete: Value being completed. May be empty.
+
+        .. versionadded:: 8.0
+        """
+        from click.shell_completion import CompletionItem
+
+        results: t.List["CompletionItem"] = []
+
+        if incomplete and not incomplete[0].isalnum():
+            for param in self.get_params(ctx):
+                if (
+                    not isinstance(param, Option)
+                    or param.hidden
+                    or (
+                        not param.multiple
+                        and ctx.get_parameter_source(param.name)  # type: ignore
+                        is ParameterSource.COMMANDLINE
+                    )
+                ):
+                    continue
+
+                results.extend(
+                    CompletionItem(name, help=param.help)
+                    for name in [*param.opts, *param.secondary_opts]
+                    if name.startswith(incomplete)
+                )
+
+        results.extend(super().shell_complete(ctx, incomplete))
+        return results
+
+
+class MultiCommand(Command):
+    """A multi command is the basic implementation of a command that
+    dispatches to subcommands.  The most common version is the
+    :class:`Group`.
+
+    :param invoke_without_command: this controls how the multi command itself
+                                   is invoked.  By default it's only invoked
+                                   if a subcommand is provided.
+    :param no_args_is_help: this controls what happens if no arguments are
+                            provided.  It defaults to the inverse of
+                            `invoke_without_command`: enabled when that flag
+                            is disabled, and vice versa.  If enabled, this
+                            will add ``--help`` as the argument if no
+                            arguments are passed.
+    :param subcommand_metavar: the string that is used in the documentation
+                               to indicate the subcommand place.
+    :param chain: if this is set to `True` chaining of multiple subcommands
+                  is enabled.  This restricts the form of commands in that
+                  they cannot have optional arguments but it allows
+                  multiple commands to be chained together.
+    :param result_callback: The result callback to attach to this multi
+        command. This can be set or changed later with the
+        :meth:`result_callback` decorator.
+    """
+
+    allow_extra_args = True
+    allow_interspersed_args = False
+
+    def __init__(
+        self,
+        name: t.Optional[str] = None,
+        invoke_without_command: bool = False,
+        no_args_is_help: t.Optional[bool] = None,
+        subcommand_metavar: t.Optional[str] = None,
+        chain: bool = False,
+        result_callback: t.Optional[t.Callable[..., t.Any]] = None,
+        **attrs: t.Any,
+    ) -> None:
+        super().__init__(name, **attrs)
+
+        if no_args_is_help is None:
+            no_args_is_help = not invoke_without_command
+
+        self.no_args_is_help = no_args_is_help
+        self.invoke_without_command = invoke_without_command
+
+        if subcommand_metavar is None:
+            if chain:
+                subcommand_metavar = "COMMAND1 [ARGS]... [COMMAND2 [ARGS]...]..."
+            else:
+                subcommand_metavar = "COMMAND [ARGS]..."
+
+        self.subcommand_metavar = subcommand_metavar
+        self.chain = chain
+        # The result callback that is stored. This can be set or
+        # overridden with the :func:`result_callback` decorator.
+        self._result_callback = result_callback
+
+        if self.chain:
+            for param in self.params:
+                if isinstance(param, Argument) and not param.required:
+                    raise RuntimeError(
+                        "Multi commands in chain mode cannot have"
+                        " optional arguments."
+                    )
+
+    def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]:
+        info_dict = super().to_info_dict(ctx)
+        commands = {}
+
+        for name in self.list_commands(ctx):
+            command = self.get_command(ctx, name)
+
+            if command is None:
+                continue
+
+            sub_ctx = ctx._make_sub_context(command)
+
+            with sub_ctx.scope(cleanup=False):
+                commands[name] = command.to_info_dict(sub_ctx)
+
+        info_dict.update(commands=commands, chain=self.chain)
+        return info_dict
+
+    def collect_usage_pieces(self, ctx: Context) -> t.List[str]:
+        rv = super().collect_usage_pieces(ctx)
+        rv.append(self.subcommand_metavar)
+        return rv
+
+    def format_options(self, ctx: Context, formatter: HelpFormatter) -> None:
+        super().format_options(ctx, formatter)
+        self.format_commands(ctx, formatter)
+
+    def result_callback(self, replace: bool = False) -> t.Callable[[F], F]:
+        """Adds a result callback to the command.  By default if a
+        result callback is already registered this will chain them but
+        this can be disabled with the `replace` parameter.  The result
+        callback is invoked with the return value of the subcommand
+        (or the list of return values from all subcommands if chaining
+        is enabled) as well as the parameters as they would be passed
+        to the main callback.
+
+        Example::
+
+            @click.group()
+            @click.option('-i', '--input', default=23)
+            def cli(input):
+                return 42
+
+            @cli.result_callback()
+            def process_result(result, input):
+                return result + input
+
+        :param replace: if set to `True` an already existing result
+                        callback will be removed.
+
+        .. versionchanged:: 8.0
+            Renamed from ``resultcallback``.
+
+        .. versionadded:: 3.0
+        """
+
+        def decorator(f: F) -> F:
+            old_callback = self._result_callback
+
+            if old_callback is None or replace:
+                self._result_callback = f
+                return f
+
+            def function(__value, *args, **kwargs):  # type: ignore
+                inner = old_callback(__value, *args, **kwargs)  # type: ignore
+                return f(inner, *args, **kwargs)
+
+            self._result_callback = rv = update_wrapper(t.cast(F, function), f)
+            return rv
+
+        return decorator
+
+    def format_commands(self, ctx: Context, formatter: HelpFormatter) -> None:
+        """Extra format methods for multi methods that adds all the commands
+        after the options.
+        """
+        commands = []
+        for subcommand in self.list_commands(ctx):
+            cmd = self.get_command(ctx, subcommand)
+            # The command was listed but could not be loaded; ignore it.
+            if cmd is None:
+                continue
+            if cmd.hidden:
+                continue
+
+            commands.append((subcommand, cmd))
+
+        # allow for 3 times the default spacing
+        if len(commands):
+            limit = formatter.width - 6 - max(len(cmd[0]) for cmd in commands)
+
+            rows = []
+            for subcommand, cmd in commands:
+                help = cmd.get_short_help_str(limit)
+                rows.append((subcommand, help))
+
+            if rows:
+                with formatter.section(_("Commands")):
+                    formatter.write_dl(rows)
+
+    def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]:
+        if not args and self.no_args_is_help and not ctx.resilient_parsing:
+            echo(ctx.get_help(), color=ctx.color)
+            ctx.exit()
+
+        rest = super().parse_args(ctx, args)
+
+        if self.chain:
+            ctx.protected_args = rest
+            ctx.args = []
+        elif rest:
+            ctx.protected_args, ctx.args = rest[:1], rest[1:]
+
+        return ctx.args
+
+    def invoke(self, ctx: Context) -> t.Any:
+        def _process_result(value: t.Any) -> t.Any:
+            if self._result_callback is not None:
+                value = ctx.invoke(self._result_callback, value, **ctx.params)
+            return value
+
+        if not ctx.protected_args:
+            if self.invoke_without_command:
+                # No subcommand was invoked, so the result callback is
+                # invoked with the group return value for regular
+                # groups, or an empty list for chained groups.
+                with ctx:
+                    rv = super().invoke(ctx)
+                    return _process_result([] if self.chain else rv)
+            ctx.fail(_("Missing command."))
+
+        # Fetch args back out
+        args = [*ctx.protected_args, *ctx.args]
+        ctx.args = []
+        ctx.protected_args = []
+
+        # If we're not in chain mode, we only allow the invocation of a
+        # single command but we also inform the current context about the
+        # name of the command to invoke.
+        if not self.chain:
+            # Make sure the context is entered so we do not clean up
+            # resources until the result processor has worked.
+            with ctx:
+                cmd_name, cmd, args = self.resolve_command(ctx, args)
+                assert cmd is not None
+                ctx.invoked_subcommand = cmd_name
+                super().invoke(ctx)
+                sub_ctx = cmd.make_context(cmd_name, args, parent=ctx)
+                with sub_ctx:
+                    return _process_result(sub_ctx.command.invoke(sub_ctx))
+
+        # In chain mode we create the contexts step by step, but after the
+        # base command has been invoked.  Because at that point we do not
+        # know the subcommands yet, the invoked subcommand attribute is
+        # set to ``*`` to inform the command that subcommands are executed
+        # but nothing else.
+        with ctx:
+            ctx.invoked_subcommand = "*" if args else None
+            super().invoke(ctx)
+
+            # Otherwise we make every single context and invoke them in a
+            # chain.  In that case the return value to the result processor
+            # is the list of all invoked subcommand's results.
+            contexts = []
+            while args:
+                cmd_name, cmd, args = self.resolve_command(ctx, args)
+                assert cmd is not None
+                sub_ctx = cmd.make_context(
+                    cmd_name,
+                    args,
+                    parent=ctx,
+                    allow_extra_args=True,
+                    allow_interspersed_args=False,
+                )
+                contexts.append(sub_ctx)
+                args, sub_ctx.args = sub_ctx.args, []
+
+            rv = []
+            for sub_ctx in contexts:
+                with sub_ctx:
+                    rv.append(sub_ctx.command.invoke(sub_ctx))
+            return _process_result(rv)
+
+    def resolve_command(
+        self, ctx: Context, args: t.List[str]
+    ) -> t.Tuple[t.Optional[str], t.Optional[Command], t.List[str]]:
+        cmd_name = make_str(args[0])
+        original_cmd_name = cmd_name
+
+        # Get the command
+        cmd = self.get_command(ctx, cmd_name)
+
+        # If we can't find the command but there is a normalization
+        # function available, we try with that one.
+        if cmd is None and ctx.token_normalize_func is not None:
+            cmd_name = ctx.token_normalize_func(cmd_name)
+            cmd = self.get_command(ctx, cmd_name)
+
+        # If we don't find the command we want to show an error message
+        # to the user that it was not provided.  However, there is
+        # something else we should do: if the first argument looks like
+        # an option we want to kick off parsing again for arguments to
+        # resolve things like --help which now should go to the main
+        # place.
+        if cmd is None and not ctx.resilient_parsing:
+            if split_opt(cmd_name)[0]:
+                self.parse_args(ctx, ctx.args)
+            ctx.fail(_("No such command {name!r}.").format(name=original_cmd_name))
+        return cmd_name if cmd else None, cmd, args[1:]
+
+    def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]:
+        """Given a context and a command name, this returns a
+        :class:`Command` object if it exists or returns `None`.
+        """
+        raise NotImplementedError
+
+    def list_commands(self, ctx: Context) -> t.List[str]:
+        """Returns a list of subcommand names in the order they should
+        appear.
+        """
+        return []
+
+    def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]:
+        """Return a list of completions for the incomplete value. Looks
+        at the names of options, subcommands, and chained
+        multi-commands.
+
+        :param ctx: Invocation context for this command.
+        :param incomplete: Value being completed. May be empty.
+
+        .. versionadded:: 8.0
+        """
+        from click.shell_completion import CompletionItem
+
+        results = [
+            CompletionItem(name, help=command.get_short_help_str())
+            for name, command in _complete_visible_commands(ctx, incomplete)
+        ]
+        results.extend(super().shell_complete(ctx, incomplete))
+        return results
+
+
+class Group(MultiCommand):
+    """A group allows a command to have subcommands attached. This is
+    the most common way to implement nesting in Click.
+
+    :param name: The name of the group command.
+    :param commands: A dict mapping names to :class:`Command` objects.
+        Can also be a list of :class:`Command`, which will use
+        :attr:`Command.name` to create the dict.
+    :param attrs: Other command arguments described in
+        :class:`MultiCommand`, :class:`Command`, and
+        :class:`BaseCommand`.
+
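+    A minimal sketch (``hello`` and ``bye`` are hypothetical commands)::
+
+        cli = Group("cli", commands=[hello, bye])
+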
+    .. versionchanged:: 8.0
+        The ``commands`` argument can be a list of command objects.
+    """
+
+    #: If set, this is used by the group's :meth:`command` decorator
+    #: as the default :class:`Command` class. This is useful to make all
+    #: subcommands use a custom command class.
+    #:
+    #: .. versionadded:: 8.0
+    command_class: t.Optional[t.Type[Command]] = None
+
+    #: If set, this is used by the group's :meth:`group` decorator
+    #: as the default :class:`Group` class. This is useful to make all
+    #: subgroups use a custom group class.
+    #:
+    #: If set to the special value :class:`type` (literally
+    #: ``group_class = type``), this group's class will be used as the
+    #: default class. This makes a custom group class continue to make
+    #: custom groups.
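+    #:
+    #: A minimal sketch::
+    #:
+    #:     class MyGroup(Group):
+    #:         # Nested groups created via @group() are also MyGroup.
+    #:         group_class = type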
+    #:
+    #: .. versionadded:: 8.0
+    group_class: t.Optional[t.Union[t.Type["Group"], t.Type[type]]] = None
+    # Literal[type] isn't valid, so use Type[type]
+
+    def __init__(
+        self,
+        name: t.Optional[str] = None,
+        commands: t.Optional[t.Union[t.Dict[str, Command], t.Sequence[Command]]] = None,
+        **attrs: t.Any,
+    ) -> None:
+        super().__init__(name, **attrs)
+
+        if commands is None:
+            commands = {}
+        elif isinstance(commands, abc.Sequence):
+            commands = {c.name: c for c in commands if c.name is not None}
+
+        #: The registered subcommands by their exported names.
+        self.commands: t.Dict[str, Command] = commands
+
+    def add_command(self, cmd: Command, name: t.Optional[str] = None) -> None:
+        """Registers another :class:`Command` with this group.  If the name
+        is not provided, the name of the command is used.
+        """
+        name = name or cmd.name
+        if name is None:
+            raise TypeError("Command has no name.")
+        _check_multicommand(self, name, cmd, register=True)
+        self.commands[name] = cmd
+
+    @t.overload
+    def command(self, __func: t.Callable[..., t.Any]) -> Command:
+        ...
+
+    @t.overload
+    def command(
+        self, *args: t.Any, **kwargs: t.Any
+    ) -> t.Callable[[t.Callable[..., t.Any]], Command]:
+        ...
+
+    def command(
+        self, *args: t.Any, **kwargs: t.Any
+    ) -> t.Union[t.Callable[[t.Callable[..., t.Any]], Command], Command]:
+        """A shortcut decorator for declaring and attaching a command to
+        the group. This takes the same arguments as :func:`command` and
+        immediately registers the created command with this group by
+        calling :meth:`add_command`.
+
+        To customize the command class used, set the
+        :attr:`command_class` attribute.
+
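+        A minimal sketch, where ``cli`` is an existing group::
+
+            @cli.command()
+            def sync():
+                click.echo("syncing")
+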
+        .. versionchanged:: 8.1
+            This decorator can be applied without parentheses.
+
+        .. versionchanged:: 8.0
+            Added the :attr:`command_class` attribute.
+        """
+        from .decorators import command
+
+        if self.command_class and kwargs.get("cls") is None:
+            kwargs["cls"] = self.command_class
+
+        func: t.Optional[t.Callable] = None
+
+        if args and callable(args[0]):
+            assert (
+                len(args) == 1 and not kwargs
+            ), "Use 'command(**kwargs)(callable)' to provide arguments."
+            (func,) = args
+            args = ()
+
+        def decorator(f: t.Callable[..., t.Any]) -> Command:
+            cmd: Command = command(*args, **kwargs)(f)
+            self.add_command(cmd)
+            return cmd
+
+        if func is not None:
+            return decorator(func)
+
+        return decorator
+
+    @t.overload
+    def group(self, __func: t.Callable[..., t.Any]) -> "Group":
+        ...
+
+    @t.overload
+    def group(
+        self, *args: t.Any, **kwargs: t.Any
+    ) -> t.Callable[[t.Callable[..., t.Any]], "Group"]:
+        ...
+
+    def group(
+        self, *args: t.Any, **kwargs: t.Any
+    ) -> t.Union[t.Callable[[t.Callable[..., t.Any]], "Group"], "Group"]:
+        """A shortcut decorator for declaring and attaching a group to
+        the group. This takes the same arguments as :func:`group` and
+        immediately registers the created group with this group by
+        calling :meth:`add_command`.
+
+        To customize the group class used, set the :attr:`group_class`
+        attribute.
+
+        .. versionchanged:: 8.1
+            This decorator can be applied without parentheses.
+
+        .. versionchanged:: 8.0
+            Added the :attr:`group_class` attribute.
+        """
+        from .decorators import group
+
+        func: t.Optional[t.Callable] = None
+
+        if args and callable(args[0]):
+            assert (
+                len(args) == 1 and not kwargs
+            ), "Use 'group(**kwargs)(callable)' to provide arguments."
+            (func,) = args
+            args = ()
+
+        if self.group_class is not None and kwargs.get("cls") is None:
+            if self.group_class is type:
+                kwargs["cls"] = type(self)
+            else:
+                kwargs["cls"] = self.group_class
+
+        def decorator(f: t.Callable[..., t.Any]) -> "Group":
+            cmd: Group = group(*args, **kwargs)(f)
+            self.add_command(cmd)
+            return cmd
+
+        if func is not None:
+            return decorator(func)
+
+        return decorator
+
+    def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]:
+        return self.commands.get(cmd_name)
+
+    def list_commands(self, ctx: Context) -> t.List[str]:
+        return sorted(self.commands)
+
+
+class CommandCollection(MultiCommand):
+    """A command collection is a multi command that merges multiple multi
+    commands together into one.  This is a straightforward implementation
+    that accepts a list of different multi commands as sources and
+    provides the commands from all of them.
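+
+    A minimal sketch (``cli1`` and ``cli2`` are hypothetical groups)::
+
+        cli = CommandCollection(sources=[cli1, cli2])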
+    """
+
+    def __init__(
+        self,
+        name: t.Optional[str] = None,
+        sources: t.Optional[t.List[MultiCommand]] = None,
+        **attrs: t.Any,
+    ) -> None:
+        super().__init__(name, **attrs)
+        #: The list of registered multi commands.
+        self.sources: t.List[MultiCommand] = sources or []
+
+    def add_source(self, multi_cmd: MultiCommand) -> None:
+        """Adds a new multi command to the chain dispatcher."""
+        self.sources.append(multi_cmd)
+
+    def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]:
+        for source in self.sources:
+            rv = source.get_command(ctx, cmd_name)
+
+            if rv is not None:
+                if self.chain:
+                    _check_multicommand(self, cmd_name, rv)
+
+                return rv
+
+        return None
+
+    def list_commands(self, ctx: Context) -> t.List[str]:
+        rv: t.Set[str] = set()
+
+        for source in self.sources:
+            rv.update(source.list_commands(ctx))
+
+        return sorted(rv)
+
+
+def _check_iter(value: t.Any) -> t.Iterator[t.Any]:
+    """Check if the value is iterable but not a string. Raises a type
+    error, or return an iterator over the value.
+    """
+    if isinstance(value, str):
+        raise TypeError
+
+    return iter(value)
+
+
+class Parameter:
+    r"""A parameter to a command comes in two versions: they are either
+    :class:`Option`\s or :class:`Argument`\s.  Other subclasses are currently
+    not supported by design as some of the internals for parsing are
+    intentionally not finalized.
+
+    Some settings are supported by both options and arguments.
+
+    :param param_decls: the parameter declarations for this option or
+                        argument.  This is a list of flags or argument
+                        names.
+    :param type: the type that should be used.  Either a :class:`ParamType`
+                 or a Python type.  The latter is converted into the former
+                 automatically if supported.
+    :param required: controls if this is optional or not.
+    :param default: the default value if omitted.  This can also be a callable,
+                    in which case it's invoked when the default is needed
+                    without any arguments.
+    :param callback: A function to further process or validate the value
+        after type conversion. It is called as ``f(ctx, param, value)``
+        and must return the value. It is called for all sources,
+        including prompts.
+    :param nargs: the number of arguments to match.  If not ``1`` the return
+                  value is a tuple instead of single value.  The default for
+                  nargs is ``1`` (except if the type is a tuple, then it's
+                  the arity of the tuple). If ``nargs=-1``, all remaining
+                  parameters are collected.
+    :param metavar: how the value is represented in the help page.
+    :param expose_value: if this is `True` then the value is passed onwards
+                         to the command callback and stored on the context,
+                         otherwise it's skipped.
+    :param is_eager: eager values are processed before non-eager ones.  This
+                     should not be set for arguments or it will invert the
+                     order of processing.
+    :param envvar: a string or list of strings that are environment variables
+                   that should be checked.
+    :param shell_complete: A function that returns custom shell
+        completions. Used instead of the param's type completion if
+        given. Takes ``ctx, param, incomplete`` and must return a list
+        of :class:`~click.shell_completion.CompletionItem` or a list of
+        strings.
+
+    .. versionchanged:: 8.0
+        ``process_value`` validates required parameters and bounded
+        ``nargs``, and invokes the parameter callback before returning
+        the value. This allows the callback to validate prompts.
+        ``full_process_value`` is removed.
+
+    .. versionchanged:: 8.0
+        ``autocompletion`` is renamed to ``shell_complete`` and has new
+        semantics described above. The old name is deprecated and will
+        be removed in 8.1, until then it will be wrapped to match the
+        new requirements.
+
+    .. versionchanged:: 8.0
+        For ``multiple=True, nargs>1``, the default must be a list of
+        tuples.
+
+    .. versionchanged:: 8.0
+        Setting a default is no longer required for ``nargs>1``, it will
+        default to ``None``. ``multiple=True`` or ``nargs=-1`` will
+        default to ``()``.
+
+    .. versionchanged:: 7.1
+        Empty environment variables are ignored rather than taking the
+        empty string value. This makes it possible for scripts to clear
+        variables if they can't unset them.
+
+    .. versionchanged:: 2.0
+        Changed signature for parameter callback to also be passed the
+        parameter. The old callback format will still work, but it will
+        raise a warning to give you a chance to migrate the code more easily.
+    """
+
+    param_type_name = "parameter"
+
+    def __init__(
+        self,
+        param_decls: t.Optional[t.Sequence[str]] = None,
+        type: t.Optional[t.Union[types.ParamType, t.Any]] = None,
+        required: bool = False,
+        default: t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]] = None,
+        callback: t.Optional[t.Callable[[Context, "Parameter", t.Any], t.Any]] = None,
+        nargs: t.Optional[int] = None,
+        multiple: bool = False,
+        metavar: t.Optional[str] = None,
+        expose_value: bool = True,
+        is_eager: bool = False,
+        envvar: t.Optional[t.Union[str, t.Sequence[str]]] = None,
+        shell_complete: t.Optional[
+            t.Callable[
+                [Context, "Parameter", str],
+                t.Union[t.List["CompletionItem"], t.List[str]],
+            ]
+        ] = None,
+    ) -> None:
+        self.name, self.opts, self.secondary_opts = self._parse_decls(
+            param_decls or (), expose_value
+        )
+        self.type = types.convert_type(type, default)
+
+        # Default nargs to what the type tells us if we have that
+        # information available.
+        if nargs is None:
+            if self.type.is_composite:
+                nargs = self.type.arity
+            else:
+                nargs = 1
+
+        self.required = required
+        self.callback = callback
+        self.nargs = nargs
+        self.multiple = multiple
+        self.expose_value = expose_value
+        self.default = default
+        self.is_eager = is_eager
+        self.metavar = metavar
+        self.envvar = envvar
+        self._custom_shell_complete = shell_complete
+
+        if __debug__:
+            if self.type.is_composite and nargs != self.type.arity:
+                raise ValueError(
+                    f"'nargs' must be {self.type.arity} (or None) for"
+                    f" type {self.type!r}, but it was {nargs}."
+                )
+
+            # Skip the check when there is no default or it is callable.
+            check_default = default if not callable(default) else None
+
+            if check_default is not None:
+                if multiple:
+                    try:
+                        # Only check the first value against nargs.
+                        check_default = next(_check_iter(check_default), None)
+                    except TypeError:
+                        raise ValueError(
+                            "'default' must be a list when 'multiple' is true."
+                        ) from None
+
+                # Can be None for multiple with empty default.
+                if nargs != 1 and check_default is not None:
+                    try:
+                        _check_iter(check_default)
+                    except TypeError:
+                        if multiple:
+                            message = (
+                                "'default' must be a list of lists when 'multiple' is"
+                                " true and 'nargs' != 1."
+                            )
+                        else:
+                            message = "'default' must be a list when 'nargs' != 1."
+
+                        raise ValueError(message) from None
+
+                    if nargs > 1 and len(check_default) != nargs:
+                        subject = "item length" if multiple else "length"
+                        raise ValueError(
+                            f"'default' {subject} must match nargs={nargs}."
+                        )
+
+    def to_info_dict(self) -> t.Dict[str, t.Any]:
+        """Gather information that could be useful for a tool generating
+        user-facing documentation.
+
+        Use :meth:`click.Context.to_info_dict` to traverse the entire
+        CLI structure.
+
+        .. versionadded:: 8.0
+        """
+        return {
+            "name": self.name,
+            "param_type_name": self.param_type_name,
+            "opts": self.opts,
+            "secondary_opts": self.secondary_opts,
+            "type": self.type.to_info_dict(),
+            "required": self.required,
+            "nargs": self.nargs,
+            "multiple": self.multiple,
+            "default": self.default,
+            "envvar": self.envvar,
+        }
+
+    def __repr__(self) -> str:
+        return f"<{self.__class__.__name__} {self.name}>"
+
+    def _parse_decls(
+        self, decls: t.Sequence[str], expose_value: bool
+    ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]:
+        raise NotImplementedError()
+
+    @property
+    def human_readable_name(self) -> str:
+        """Returns the human readable name of this parameter.  This is the
+        same as the name for options, but the metavar for arguments.
+        """
+        return self.name  # type: ignore
+
+    def make_metavar(self) -> str:
+        if self.metavar is not None:
+            return self.metavar
+
+        metavar = self.type.get_metavar(self)
+
+        if metavar is None:
+            metavar = self.type.name.upper()
+
+        if self.nargs != 1:
+            metavar += "..."
+
+        return metavar
+
+    @t.overload
+    def get_default(
+        self, ctx: Context, call: "te.Literal[True]" = True
+    ) -> t.Optional[t.Any]:
+        ...
+
+    @t.overload
+    def get_default(
+        self, ctx: Context, call: bool = ...
+    ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]:
+        ...
+
+    def get_default(
+        self, ctx: Context, call: bool = True
+    ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]:
+        """Get the default for the parameter. Tries
+        :meth:`Context.lookup_default` first, then the local default.
+
+        :param ctx: Current context.
+        :param call: If the default is a callable, call it. Disable to
+            return the callable instead.
+
+        .. versionchanged:: 8.0.2
+            Type casting is no longer performed when getting a default.
+
+        .. versionchanged:: 8.0.1
+            Type casting can fail in resilient parsing mode. Invalid
+            defaults will not prevent showing help text.
+
+        .. versionchanged:: 8.0
+            Looks at ``ctx.default_map`` first.
+
+        .. versionchanged:: 8.0
+            Added the ``call`` parameter.
+        """
+        value = ctx.lookup_default(self.name, call=False)  # type: ignore
+
+        if value is None:
+            value = self.default
+
+        if call and callable(value):
+            value = value()
+
+        return value
+
+    def add_to_parser(self, parser: OptionParser, ctx: Context) -> None:
+        raise NotImplementedError()
+
+    def consume_value(
+        self, ctx: Context, opts: t.Mapping[str, t.Any]
+    ) -> t.Tuple[t.Any, ParameterSource]:
+        value = opts.get(self.name)  # type: ignore
+        source = ParameterSource.COMMANDLINE
+
+        if value is None:
+            value = self.value_from_envvar(ctx)
+            source = ParameterSource.ENVIRONMENT
+
+        if value is None:
+            value = ctx.lookup_default(self.name)  # type: ignore
+            source = ParameterSource.DEFAULT_MAP
+
+        if value is None:
+            value = self.get_default(ctx)
+            source = ParameterSource.DEFAULT
+
+        return value, source
+
+    def type_cast_value(self, ctx: Context, value: t.Any) -> t.Any:
+        """Convert and validate a value against the option's
+        :attr:`type`, :attr:`multiple`, and :attr:`nargs`.
+        """
+        if value is None:
+            return () if self.multiple or self.nargs == -1 else None
+
+        def check_iter(value: t.Any) -> t.Iterator:
+            try:
+                return _check_iter(value)
+            except TypeError:
+                # This should only happen when passing in args manually,
+                # the parser should construct an iterable when parsing
+                # the command line.
+                raise BadParameter(
+                    _("Value must be an iterable."), ctx=ctx, param=self
+                ) from None
+
+        if self.nargs == 1 or self.type.is_composite:
+            convert: t.Callable[[t.Any], t.Any] = partial(
+                self.type, param=self, ctx=ctx
+            )
+        elif self.nargs == -1:
+
+            def convert(value: t.Any) -> t.Tuple:
+                return tuple(self.type(x, self, ctx) for x in check_iter(value))
+
+        else:  # nargs > 1
+
+            def convert(value: t.Any) -> t.Tuple:
+                value = tuple(check_iter(value))
+
+                if len(value) != self.nargs:
+                    raise BadParameter(
+                        ngettext(
+                            "Takes {nargs} values but 1 was given.",
+                            "Takes {nargs} values but {len} were given.",
+                            len(value),
+                        ).format(nargs=self.nargs, len=len(value)),
+                        ctx=ctx,
+                        param=self,
+                    )
+
+                return tuple(self.type(x, self, ctx) for x in value)
+
+        if self.multiple:
+            return tuple(convert(x) for x in check_iter(value))
+
+        return convert(value)
+
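+    # A sketch of what the conversion above produces (illustrative only;
+    # the option names are hypothetical):
+    #
+    #   @click.command()
+    #   @click.option("--point", nargs=2, type=int)  # --point 1 2 -> (1, 2)
+    #   @click.option("--tag", multiple=True)        # --tag a --tag b -> ("a", "b")
+    #   def cmd(point, tag): ...
+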
+    def value_is_missing(self, value: t.Any) -> bool:
+        if value is None:
+            return True
+
+        if (self.nargs != 1 or self.multiple) and value == ():
+            return True
+
+        return False
+
+    def process_value(self, ctx: Context, value: t.Any) -> t.Any:
+        value = self.type_cast_value(ctx, value)
+
+        if self.required and self.value_is_missing(value):
+            raise MissingParameter(ctx=ctx, param=self)
+
+        if self.callback is not None:
+            value = self.callback(ctx, self, value)
+
+        return value
+
+    def resolve_envvar_value(self, ctx: Context) -> t.Optional[str]:
+        if self.envvar is None:
+            return None
+
+        if isinstance(self.envvar, str):
+            rv = os.environ.get(self.envvar)
+
+            if rv:
+                return rv
+        else:
+            for envvar in self.envvar:
+                rv = os.environ.get(envvar)
+
+                if rv:
+                    return rv
+
+        return None
+
+    def value_from_envvar(self, ctx: Context) -> t.Optional[t.Any]:
+        rv: t.Optional[t.Any] = self.resolve_envvar_value(ctx)
+
+        if rv is not None and self.nargs != 1:
+            rv = self.type.split_envvar_value(rv)
+
+        return rv
+
+    def handle_parse_result(
+        self, ctx: Context, opts: t.Mapping[str, t.Any], args: t.List[str]
+    ) -> t.Tuple[t.Any, t.List[str]]:
+        with augment_usage_errors(ctx, param=self):
+            value, source = self.consume_value(ctx, opts)
+            ctx.set_parameter_source(self.name, source)  # type: ignore
+
+            try:
+                value = self.process_value(ctx, value)
+            except Exception:
+                if not ctx.resilient_parsing:
+                    raise
+
+                value = None
+
+        if self.expose_value:
+            ctx.params[self.name] = value  # type: ignore
+
+        return value, args
+
+    def get_help_record(self, ctx: Context) -> t.Optional[t.Tuple[str, str]]:
+        pass
+
+    def get_usage_pieces(self, ctx: Context) -> t.List[str]:
+        return []
+
+    def get_error_hint(self, ctx: Context) -> str:
+        """Get a stringified version of the param for use in error messages to
+        indicate which param caused the error.
+        """
+        hint_list = self.opts or [self.human_readable_name]
+        return " / ".join(f"'{x}'" for x in hint_list)
+
+    def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]:
+        """Return a list of completions for the incomplete value. If a
+        ``shell_complete`` function was given during init, it is used.
+        Otherwise, the :attr:`type`
+        :meth:`~click.types.ParamType.shell_complete` function is used.
+
+        :param ctx: Invocation context for this command.
+        :param incomplete: Value being completed. May be empty.
+
+        .. versionadded:: 8.0
+        """
+        if self._custom_shell_complete is not None:
+            results = self._custom_shell_complete(ctx, self, incomplete)
+
+            if results and isinstance(results[0], str):
+                from click.shell_completion import CompletionItem
+
+                results = [CompletionItem(c) for c in results]
+
+            return t.cast(t.List["CompletionItem"], results)
+
+        return self.type.shell_complete(ctx, self, incomplete)
+
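+# Callback sketch (illustrative, not part of the upstream source): a
+# ``callback`` receives ``(ctx, param, value)`` after type conversion and
+# must return the (possibly modified) value. ``normalize`` is hypothetical.
+#
+#   def normalize(ctx, param, value):
+#       return value.lower() if value is not None else value
+#
+#   @click.command()
+#   @click.option("--lang", callback=normalize)
+#   def build(lang): ...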
+
+class Option(Parameter):
+    """Options are usually optional values on the command line and
+    have some extra features that arguments don't have.
+
+    All other parameters are passed onwards to the parameter constructor.
+
+    :param show_default: Show the default value for this option in its
+        help text. Values are not shown by default, unless
+        :attr:`Context.show_default` is ``True``. If this value is a
+        string, it shows that string in parentheses instead of the
+        actual value. This is particularly useful for dynamic options.
+        For single option boolean flags, the default remains hidden if
+        its value is ``False``.
+    :param show_envvar: Controls if an environment variable should be
+        shown on the help page. Normally, environment variables are not
+        shown.
+    :param prompt: If set to ``True`` or a non-empty string then the
+        user will be prompted for input. If set to ``True`` the prompt
+        will be the option name capitalized.
+    :param confirmation_prompt: Prompt a second time to confirm the
+        value if it was prompted for. Can be set to a string instead of
+        ``True`` to customize the message.
+    :param prompt_required: If set to ``False``, the user will be
+        prompted for input only when the option was specified as a flag
+        without a value.
+    :param hide_input: If this is ``True`` then the input on the prompt
+        will be hidden from the user. This is useful for password input.
+    :param is_flag: forces this option to act as a flag.  The default is
+                    auto detection.
+    :param flag_value: which value should be used for this flag if it's
+                       enabled.  This is set to a boolean automatically if
+                       the option string contains a slash to mark two options.
+    :param multiple: if this is set to `True` then the argument is accepted
+                     multiple times and recorded.  This is similar to ``nargs``
+                     in how it works but supports an arbitrary number of
+                     arguments.
+    :param count: this flag makes an option increment an integer.
+    :param allow_from_autoenv: if this is enabled then the value of this
+                               parameter will be pulled from an environment
+                               variable in case a prefix is defined on the
+                               context.
+    :param help: the help string.
+    :param hidden: hide this option from help outputs.
+
+    .. versionchanged:: 8.1.0
+        Help text indentation is cleaned here instead of only in the
+        ``@option`` decorator.
+
+    .. versionchanged:: 8.1.0
+        The ``show_default`` parameter overrides
+        ``Context.show_default``.
+
+    .. versionchanged:: 8.1.0
+        The default of a single option boolean flag is not shown if the
+        default value is ``False``.
+
+    .. versionchanged:: 8.0.1
+        ``type`` is detected from ``flag_value`` if given.
+    """
+
+    param_type_name = "option"
+
+    def __init__(
+        self,
+        param_decls: t.Optional[t.Sequence[str]] = None,
+        show_default: t.Union[bool, str, None] = None,
+        prompt: t.Union[bool, str] = False,
+        confirmation_prompt: t.Union[bool, str] = False,
+        prompt_required: bool = True,
+        hide_input: bool = False,
+        is_flag: t.Optional[bool] = None,
+        flag_value: t.Optional[t.Any] = None,
+        multiple: bool = False,
+        count: bool = False,
+        allow_from_autoenv: bool = True,
+        type: t.Optional[t.Union[types.ParamType, t.Any]] = None,
+        help: t.Optional[str] = None,
+        hidden: bool = False,
+        show_choices: bool = True,
+        show_envvar: bool = False,
+        **attrs: t.Any,
+    ) -> None:
+        if help:
+            help = inspect.cleandoc(help)
+
+        default_is_missing = "default" not in attrs
+        super().__init__(param_decls, type=type, multiple=multiple, **attrs)
+
+        if prompt is True:
+            if self.name is None:
+                raise TypeError("'name' is required with 'prompt=True'.")
+
+            prompt_text: t.Optional[str] = self.name.replace("_", " ").capitalize()
+        elif prompt is False:
+            prompt_text = None
+        else:
+            prompt_text = prompt
+
+        self.prompt = prompt_text
+        self.confirmation_prompt = confirmation_prompt
+        self.prompt_required = prompt_required
+        self.hide_input = hide_input
+        self.hidden = hidden
+
+        # If prompt is enabled but not required, then the option can be
+        # used as a flag to indicate using prompt or flag_value.
+        self._flag_needs_value = self.prompt is not None and not self.prompt_required
+
+        if is_flag is None:
+            if flag_value is not None:
+                # Implicitly a flag because flag_value was set.
+                is_flag = True
+            elif self._flag_needs_value:
+                # Not a flag, but when used as a flag it shows a prompt.
+                is_flag = False
+            else:
+                # Implicitly a flag because flag options were given.
+                is_flag = bool(self.secondary_opts)
+        elif is_flag is False and not self._flag_needs_value:
+            # Not a flag, and prompt is not enabled, can be used as a
+            # flag if flag_value is set.
+            self._flag_needs_value = flag_value is not None
+
+        if is_flag and default_is_missing and not self.required:
+            self.default: t.Union[t.Any, t.Callable[[], t.Any]] = False
+
+        if flag_value is None:
+            flag_value = not self.default
+
+        if is_flag and type is None:
+            # Re-guess the type from the flag value instead of the
+            # default.
+            self.type = types.convert_type(None, flag_value)
+
+        self.is_flag: bool = is_flag
+        self.is_bool_flag = is_flag and isinstance(self.type, types.BoolParamType)
+        self.flag_value: t.Any = flag_value
+
+        # Counting
+        self.count = count
+        if count:
+            if type is None:
+                self.type = types.IntRange(min=0)
+            if default_is_missing:
+                self.default = 0
+
+        self.allow_from_autoenv = allow_from_autoenv
+        self.help = help
+        self.show_default = show_default
+        self.show_choices = show_choices
+        self.show_envvar = show_envvar
+
+        if __debug__:
+            if self.nargs == -1:
+                raise TypeError("nargs=-1 is not supported for options.")
+
+            if self.prompt and self.is_flag and not self.is_bool_flag:
+                raise TypeError("'prompt' is not valid for non-boolean flag.")
+
+            if not self.is_bool_flag and self.secondary_opts:
+                raise TypeError("Secondary flag is not valid for non-boolean flag.")
+
+            if self.is_bool_flag and self.hide_input and self.prompt is not None:
+                raise TypeError(
+                    "'prompt' with 'hide_input' is not valid for boolean flag."
+                )
+
+            if self.count:
+                if self.multiple:
+                    raise TypeError("'count' is not valid with 'multiple'.")
+
+                if self.is_flag:
+                    raise TypeError("'count' is not valid with 'is_flag'.")
+
+            if self.multiple and self.is_flag:
+                raise TypeError("'multiple' is not valid with 'is_flag', use 'count'.")
+
+    def to_info_dict(self) -> t.Dict[str, t.Any]:
+        info_dict = super().to_info_dict()
+        info_dict.update(
+            help=self.help,
+            prompt=self.prompt,
+            is_flag=self.is_flag,
+            flag_value=self.flag_value,
+            count=self.count,
+            hidden=self.hidden,
+        )
+        return info_dict
+
+    def _parse_decls(
+        self, decls: t.Sequence[str], expose_value: bool
+    ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]:
+        opts = []
+        secondary_opts = []
+        name = None
+        possible_names = []
+
+        for decl in decls:
+            if decl.isidentifier():
+                if name is not None:
+                    raise TypeError(f"Name '{name}' defined twice")
+                name = decl
+            else:
+                split_char = ";" if decl[:1] == "/" else "/"
+                if split_char in decl:
+                    first, second = decl.split(split_char, 1)
+                    first = first.rstrip()
+                    if first:
+                        possible_names.append(split_opt(first))
+                        opts.append(first)
+                    second = second.lstrip()
+                    if second:
+                        secondary_opts.append(second)  # already stripped above
+                    if first == second:
+                        raise ValueError(
+                            f"Boolean option {decl!r} cannot use the"
+                            " same flag for true/false."
+                        )
+                else:
+                    possible_names.append(split_opt(decl))
+                    opts.append(decl)
+
+        if name is None and possible_names:
+            possible_names.sort(key=lambda x: -len(x[0]))  # group long options first
+            name = possible_names[0][1].replace("-", "_").lower()
+            if not name.isidentifier():
+                name = None
+
+        if name is None:
+            if not expose_value:
+                return None, opts, secondary_opts
+            raise TypeError("Could not determine name for option")
+
+        if not opts and not secondary_opts:
+            raise TypeError(
+                f"No options defined but a name was passed ({name})."
+                " Did you mean to declare an argument instead? Did"
+                f" you mean to pass '--{name}'?"
+            )
+
+        return name, opts, secondary_opts
+
+    def add_to_parser(self, parser: OptionParser, ctx: Context) -> None:
+        if self.multiple:
+            action = "append"
+        elif self.count:
+            action = "count"
+        else:
+            action = "store"
+
+        if self.is_flag:
+            action = f"{action}_const"
+
+            if self.is_bool_flag and self.secondary_opts:
+                parser.add_option(
+                    obj=self, opts=self.opts, dest=self.name, action=action, const=True
+                )
+                parser.add_option(
+                    obj=self,
+                    opts=self.secondary_opts,
+                    dest=self.name,
+                    action=action,
+                    const=False,
+                )
+            else:
+                parser.add_option(
+                    obj=self,
+                    opts=self.opts,
+                    dest=self.name,
+                    action=action,
+                    const=self.flag_value,
+                )
+        else:
+            parser.add_option(
+                obj=self,
+                opts=self.opts,
+                dest=self.name,
+                action=action,
+                nargs=self.nargs,
+            )
+
+    def get_help_record(self, ctx: Context) -> t.Optional[t.Tuple[str, str]]:
+        if self.hidden:
+            return None
+
+        any_prefix_is_slash = False
+
+        def _write_opts(opts: t.Sequence[str]) -> str:
+            nonlocal any_prefix_is_slash
+
+            rv, any_slashes = join_options(opts)
+
+            if any_slashes:
+                any_prefix_is_slash = True
+
+            if not self.is_flag and not self.count:
+                rv += f" {self.make_metavar()}"
+
+            return rv
+
+        rv = [_write_opts(self.opts)]
+
+        if self.secondary_opts:
+            rv.append(_write_opts(self.secondary_opts))
+
+        help = self.help or ""
+        extra = []
+
+        if self.show_envvar:
+            envvar = self.envvar
+
+            if envvar is None:
+                if (
+                    self.allow_from_autoenv
+                    and ctx.auto_envvar_prefix is not None
+                    and self.name is not None
+                ):
+                    envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}"
+
+            if envvar is not None:
+                var_str = (
+                    envvar
+                    if isinstance(envvar, str)
+                    else ", ".join(str(d) for d in envvar)
+                )
+                extra.append(_("env var: {var}").format(var=var_str))
+
+        # Temporarily enable resilient parsing to avoid type casting
+        # failing for the default. Might be possible to extend this to
+        # help formatting in general.
+        resilient = ctx.resilient_parsing
+        ctx.resilient_parsing = True
+
+        try:
+            default_value = self.get_default(ctx, call=False)
+        finally:
+            ctx.resilient_parsing = resilient
+
+        show_default = False
+        show_default_is_str = False
+
+        if self.show_default is not None:
+            if isinstance(self.show_default, str):
+                show_default_is_str = show_default = True
+            else:
+                show_default = self.show_default
+        elif ctx.show_default is not None:
+            show_default = ctx.show_default
+
+        if show_default_is_str or (show_default and (default_value is not None)):
+            if show_default_is_str:
+                default_string = f"({self.show_default})"
+            elif isinstance(default_value, (list, tuple)):
+                default_string = ", ".join(str(d) for d in default_value)
+            elif inspect.isfunction(default_value):
+                default_string = _("(dynamic)")
+            elif self.is_bool_flag and self.secondary_opts:
+                # For boolean flags that have distinct True/False opts,
+                # use the opt without prefix instead of the value.
+                default_string = split_opt(
+                    (self.opts if self.default else self.secondary_opts)[0]
+                )[1]
+            elif self.is_bool_flag and not self.secondary_opts and not default_value:
+                default_string = ""
+            else:
+                default_string = str(default_value)
+
+            if default_string:
+                extra.append(_("default: {default}").format(default=default_string))
+
+        if (
+            isinstance(self.type, types._NumberRangeBase)
+            # skip count with default range type
+            and not (self.count and self.type.min == 0 and self.type.max is None)
+        ):
+            range_str = self.type._describe_range()
+
+            if range_str:
+                extra.append(range_str)
+
+        if self.required:
+            extra.append(_("required"))
+
+        if extra:
+            extra_str = "; ".join(extra)
+            help = f"{help}  [{extra_str}]" if help else f"[{extra_str}]"
+
+        return ("; " if any_prefix_is_slash else " / ").join(rv), help
+
+    @t.overload
+    def get_default(
+        self, ctx: Context, call: "te.Literal[True]" = True
+    ) -> t.Optional[t.Any]:
+        ...
+
+    @t.overload
+    def get_default(
+        self, ctx: Context, call: bool = ...
+    ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]:
+        ...
+
+    def get_default(
+        self, ctx: Context, call: bool = True
+    ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]:
+        # If we're a non-boolean flag, our default is more complex: we
+        # need to look at all flags with the same name to figure out
+        # whether we're the default one, in which case we return the
+        # flag value as the default.
+        if self.is_flag and not self.is_bool_flag:
+            for param in ctx.command.params:
+                if param.name == self.name and param.default:
+                    return param.flag_value  # type: ignore
+
+            return None
+
+        return super().get_default(ctx, call=call)
+
+    def prompt_for_value(self, ctx: Context) -> t.Any:
+        """This is an alternative flow that can be activated in the full
+        value processing if a value does not exist.  It will prompt the
+        user until a valid value exists and then return the processed
+        value as the result.
+        """
+        assert self.prompt is not None
+
+        # Calculate the default before prompting anything to be stable.
+        default = self.get_default(ctx)
+
+        # If this is a prompt for a flag we need to handle this
+        # differently.
+        if self.is_bool_flag:
+            return confirm(self.prompt, default)
+
+        return prompt(
+            self.prompt,
+            default=default,
+            type=self.type,
+            hide_input=self.hide_input,
+            show_choices=self.show_choices,
+            confirmation_prompt=self.confirmation_prompt,
+            value_proc=lambda x: self.process_value(ctx, x),
+        )
+
+    def resolve_envvar_value(self, ctx: Context) -> t.Optional[str]:
+        rv = super().resolve_envvar_value(ctx)
+
+        if rv is not None:
+            return rv
+
+        if (
+            self.allow_from_autoenv
+            and ctx.auto_envvar_prefix is not None
+            and self.name is not None
+        ):
+            envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}"
+            rv = os.environ.get(envvar)
+
+            if rv:
+                return rv
+
+        return None
+
+    def value_from_envvar(self, ctx: Context) -> t.Optional[t.Any]:
+        rv: t.Optional[t.Any] = self.resolve_envvar_value(ctx)
+
+        if rv is None:
+            return None
+
+        value_depth = (self.nargs != 1) + bool(self.multiple)
+
+        if value_depth > 0:
+            rv = self.type.split_envvar_value(rv)
+
+            if self.multiple and self.nargs != 1:
+                rv = batch(rv, self.nargs)
+
+        return rv
+
+    def consume_value(
+        self, ctx: Context, opts: t.Mapping[str, "Parameter"]
+    ) -> t.Tuple[t.Any, ParameterSource]:
+        value, source = super().consume_value(ctx, opts)
+
+        # The parser will emit a sentinel value if the option can be
+        # given as a flag without a value. This is different from None
+        # to distinguish from the flag not being given at all.
+        if value is _flag_needs_value:
+            if self.prompt is not None and not ctx.resilient_parsing:
+                value = self.prompt_for_value(ctx)
+                source = ParameterSource.PROMPT
+            else:
+                value = self.flag_value
+                source = ParameterSource.COMMANDLINE
+
+        elif (
+            self.multiple
+            and value is not None
+            and any(v is _flag_needs_value for v in value)
+        ):
+            value = [self.flag_value if v is _flag_needs_value else v for v in value]
+            source = ParameterSource.COMMANDLINE
+
+        # The value wasn't set, or it used the param's default; prompt
+        # if prompting is enabled.
+        elif (
+            source in {None, ParameterSource.DEFAULT}
+            and self.prompt is not None
+            and (self.required or self.prompt_required)
+            and not ctx.resilient_parsing
+        ):
+            value = self.prompt_for_value(ctx)
+            source = ParameterSource.PROMPT
+
+        return value, source
+
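+# Prompting sketch (illustrative, not part of the upstream source): with
+# ``prompt_required=False`` an option only prompts when given as a bare
+# flag, which is the ``_flag_needs_value`` path handled above. The option
+# names are hypothetical.
+#
+#   @click.command()
+#   @click.option("--name", prompt=True)  # prompts whenever missing
+#   @click.option("--token", prompt=True, prompt_required=False, default="anon")
+#   def login(name, token): ...
+#
+#   # `login --token` (no value) prompts; omitting --token uses the default.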
+
+class Argument(Parameter):
+    """Arguments are positional parameters to a command.  They generally
+    provide fewer features than options but can have infinite ``nargs``
+    and are required by default.
+
+    All parameters are passed onwards to the parameter constructor.
+    """
+
+    param_type_name = "argument"
+
+    def __init__(
+        self,
+        param_decls: t.Sequence[str],
+        required: t.Optional[bool] = None,
+        **attrs: t.Any,
+    ) -> None:
+        if required is None:
+            if attrs.get("default") is not None:
+                required = False
+            else:
+                required = attrs.get("nargs", 1) > 0
+
+        if "multiple" in attrs:
+            raise TypeError("__init__() got an unexpected keyword argument 'multiple'.")
+
+        super().__init__(param_decls, required=required, **attrs)
+
+        if __debug__:
+            if self.default is not None and self.nargs == -1:
+                raise TypeError("'default' is not supported for nargs=-1.")
+
+    @property
+    def human_readable_name(self) -> str:
+        if self.metavar is not None:
+            return self.metavar
+        return self.name.upper()  # type: ignore
+
+    def make_metavar(self) -> str:
+        if self.metavar is not None:
+            return self.metavar
+        var = self.type.get_metavar(self)
+        if not var:
+            var = self.name.upper()  # type: ignore
+        if not self.required:
+            var = f"[{var}]"
+        if self.nargs != 1:
+            var += "..."
+        return var
+
+    def _parse_decls(
+        self, decls: t.Sequence[str], expose_value: bool
+    ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]:
+        if not decls:
+            if not expose_value:
+                return None, [], []
+            raise TypeError("Could not determine name for argument")
+        if len(decls) == 1:
+            name = arg = decls[0]
+            name = name.replace("-", "_").lower()
+        else:
+            raise TypeError(
+                "Arguments take exactly one parameter declaration, got"
+                f" {len(decls)}."
+            )
+        return name, [arg], []
+
+    def get_usage_pieces(self, ctx: Context) -> t.List[str]:
+        return [self.make_metavar()]
+
+    def get_error_hint(self, ctx: Context) -> str:
+        return f"'{self.make_metavar()}'"
+
+    def add_to_parser(self, parser: OptionParser, ctx: Context) -> None:
+        parser.add_argument(dest=self.name, nargs=self.nargs, obj=self)
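+
+# Argument sketch (illustrative, not part of the upstream source): arguments
+# are required by default and support ``nargs=-1`` to collect the remainder.
+# ``src`` and ``dst`` are hypothetical.
+#
+#   @click.command()
+#   @click.argument("src", nargs=-1)
+#   @click.argument("dst", nargs=1)
+#   def copy(src, dst): ...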
diff --git a/venv/lib/python3.9/site-packages/click/decorators.py b/venv/lib/python3.9/site-packages/click/decorators.py
new file mode 100644
index 0000000..28618dc
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click/decorators.py
@@ -0,0 +1,497 @@
+import inspect
+import types
+import typing as t
+from functools import update_wrapper
+from gettext import gettext as _
+
+from .core import Argument
+from .core import Command
+from .core import Context
+from .core import Group
+from .core import Option
+from .core import Parameter
+from .globals import get_current_context
+from .utils import echo
+
+F = t.TypeVar("F", bound=t.Callable[..., t.Any])
+FC = t.TypeVar("FC", bound=t.Union[t.Callable[..., t.Any], Command])
+
+
+def pass_context(f: F) -> F:
+    """Marks a callback as wanting to receive the current context
+    object as first argument.
+    """
+
+    def new_func(*args, **kwargs):  # type: ignore
+        return f(get_current_context(), *args, **kwargs)
+
+    return update_wrapper(t.cast(F, new_func), f)
+
+
+def pass_obj(f: F) -> F:
+    """Similar to :func:`pass_context`, but only pass the object on the
+    context onwards (:attr:`Context.obj`).  This is useful if that object
+    represents the state of a nested system.
+    """
+
+    def new_func(*args, **kwargs):  # type: ignore
+        return f(get_current_context().obj, *args, **kwargs)
+
+    return update_wrapper(t.cast(F, new_func), f)
+
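+# Usage sketch (illustrative, not part of the upstream source): a group
+# callback stores state on ``ctx.obj`` and a subcommand receives it via
+# ``pass_obj``. The names ``cli`` and ``status`` are hypothetical.
+#
+#   @click.group()
+#   @click.pass_context
+#   def cli(ctx):
+#       ctx.obj = {"verbose": False}
+#
+#   @cli.command()
+#   @click.pass_obj
+#   def status(obj):
+#       click.echo(obj["verbose"])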
+
+def make_pass_decorator(
+    object_type: t.Type, ensure: bool = False
+) -> "t.Callable[[F], F]":
+    """Given an object type this creates a decorator that will work
+    similarly to :func:`pass_obj`, but instead of passing the object of the
+    current context, it will find the innermost context of type
+    :func:`object_type`.
+
+    This generates a decorator that works roughly like this::
+
+        from functools import update_wrapper
+
+        def decorator(f):
+            @pass_context
+            def new_func(ctx, *args, **kwargs):
+                obj = ctx.find_object(object_type)
+                return ctx.invoke(f, obj, *args, **kwargs)
+            return update_wrapper(new_func, f)
+        return decorator
+
+    :param object_type: the type of the object to pass.
+    :param ensure: if set to `True`, a new object will be created and
+                   remembered on the context if it's not there yet.
+    """
+
+    def decorator(f: F) -> F:
+        def new_func(*args, **kwargs):  # type: ignore
+            ctx = get_current_context()
+
+            if ensure:
+                obj = ctx.ensure_object(object_type)
+            else:
+                obj = ctx.find_object(object_type)
+
+            if obj is None:
+                raise RuntimeError(
+                    "Managed to invoke callback without a context"
+                    f" object of type {object_type.__name__!r}"
+                    " existing."
+                )
+
+            return ctx.invoke(f, obj, *args, **kwargs)
+
+        return update_wrapper(t.cast(F, new_func), f)
+
+    return decorator
+
+
+def pass_meta_key(
+    key: str, *, doc_description: t.Optional[str] = None
+) -> "t.Callable[[F], F]":
+    """Create a decorator that passes a key from
+    :attr:`click.Context.meta` as the first argument to the decorated
+    function.
+
+    :param key: Key in ``Context.meta`` to pass.
+    :param doc_description: Description of the object being passed,
+        inserted into the decorator's docstring. Defaults to "the 'key'
+        key from Context.meta".
+
+    .. versionadded:: 8.0
+    """
+
+    def decorator(f: F) -> F:
+        def new_func(*args, **kwargs):  # type: ignore
+            ctx = get_current_context()
+            obj = ctx.meta[key]
+            return ctx.invoke(f, obj, *args, **kwargs)
+
+        return update_wrapper(t.cast(F, new_func), f)
+
+    if doc_description is None:
+        doc_description = f"the {key!r} key from :attr:`click.Context.meta`"
+
+    decorator.__doc__ = (
+        f"Decorator that passes {doc_description} as the first argument"
+        " to the decorated function."
+    )
+    return decorator
+
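+# Usage sketch (illustrative, not part of the upstream source): building a
+# reusable decorator from a ``Context.meta`` key with the helper above.
+# ``pass_user`` and the ``"user"`` key are hypothetical; something must have
+# stored the value in ``ctx.meta["user"]`` beforehand.
+#
+#   pass_user = pass_meta_key("user", doc_description="the current user")
+#
+#   @click.command()
+#   @pass_user
+#   def whoami(user):
+#       click.echo(user)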
+
+CmdType = t.TypeVar("CmdType", bound=Command)
+
+
+@t.overload
+def command(
+    __func: t.Callable[..., t.Any],
+) -> Command:
+    ...
+
+
+@t.overload
+def command(
+    name: t.Optional[str] = None,
+    **attrs: t.Any,
+) -> t.Callable[..., Command]:
+    ...
+
+
+@t.overload
+def command(
+    name: t.Optional[str] = None,
+    cls: t.Type[CmdType] = ...,
+    **attrs: t.Any,
+) -> t.Callable[..., CmdType]:
+    ...
+
+
+def command(
+    name: t.Union[str, t.Callable[..., t.Any], None] = None,
+    cls: t.Optional[t.Type[Command]] = None,
+    **attrs: t.Any,
+) -> t.Union[Command, t.Callable[..., Command]]:
+    r"""Creates a new :class:`Command` and uses the decorated function as
+    callback.  This will also automatically attach all decorated
+    :func:`option`\s and :func:`argument`\s as parameters to the command.
+
+    The name of the command defaults to the name of the function with
+    underscores replaced by dashes.  If you want to change that, you can
+    pass the intended name as the first argument.
+
+    All keyword arguments are forwarded to the underlying command class.
+    For the ``params`` argument, any decorated params are appended to
+    the end of the list.
+
+    Once decorated the function turns into a :class:`Command` instance
+    that can be invoked as a command line utility or be attached to a
+    command :class:`Group`.
+
+    :param name: the name of the command.  This defaults to the function
+                 name with underscores replaced by dashes.
+    :param cls: the command class to instantiate.  This defaults to
+                :class:`Command`.
+
+    .. versionchanged:: 8.1
+        This decorator can be applied without parentheses.
+
+    .. versionchanged:: 8.1
+        The ``params`` argument can be used. Decorated params are
+        appended to the end of the list.
+    """
+
+    func: t.Optional[t.Callable[..., t.Any]] = None
+
+    if callable(name):
+        func = name
+        name = None
+        assert cls is None, "Use 'command(cls=cls)(callable)' to specify a class."
+        assert not attrs, "Use 'command(**kwargs)(callable)' to provide arguments."
+
+    if cls is None:
+        cls = Command
+
+    def decorator(f: t.Callable[..., t.Any]) -> Command:
+        if isinstance(f, Command):
+            raise TypeError("Attempted to convert a callback into a command twice.")
+
+        attr_params = attrs.pop("params", None)
+        params = attr_params if attr_params is not None else []
+
+        try:
+            decorator_params = f.__click_params__  # type: ignore
+        except AttributeError:
+            pass
+        else:
+            del f.__click_params__  # type: ignore
+            params.extend(reversed(decorator_params))
+
+        if attrs.get("help") is None:
+            attrs["help"] = f.__doc__
+
+        cmd = cls(  # type: ignore[misc]
+            name=name or f.__name__.lower().replace("_", "-"),  # type: ignore[arg-type]
+            callback=f,
+            params=params,
+            **attrs,
+        )
+        cmd.__doc__ = f.__doc__
+        return cmd
+
+    if func is not None:
+        return decorator(func)
+
+    return decorator
+
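+# Usage sketch (illustrative, not part of the upstream source): the default
+# command name is derived from the function name; a string argument
+# overrides it, and since 8.1 the parentheses may be dropped entirely.
+#
+#   @click.command()          # registered as "hello"
+#   def hello():
+#       """Say hello."""
+#       click.echo("Hello!")
+#
+#   @click.command("greet-all")  # explicit name
+#   def greet_all(): ...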
+
+@t.overload
+def group(
+    __func: t.Callable[..., t.Any],
+) -> Group:
+    ...
+
+
+@t.overload
+def group(
+    name: t.Optional[str] = None,
+    **attrs: t.Any,
+) -> t.Callable[[F], Group]:
+    ...
+
+
+def group(
+    name: t.Union[str, t.Callable[..., t.Any], None] = None, **attrs: t.Any
+) -> t.Union[Group, t.Callable[[F], Group]]:
+    """Creates a new :class:`Group` with a function as callback.  This
+    works otherwise the same as :func:`command` except that the `cls`
+    parameter is set to :class:`Group`.
+
+    .. versionchanged:: 8.1
+        This decorator can be applied without parentheses.
+    """
+    if attrs.get("cls") is None:
+        attrs["cls"] = Group
+
+    if callable(name):
+        grp: t.Callable[[F], Group] = t.cast(Group, command(**attrs))
+        return grp(name)
+
+    return t.cast(Group, command(name, **attrs))
+
+
+def _param_memo(f: FC, param: Parameter) -> None:
+    if isinstance(f, Command):
+        f.params.append(param)
+    else:
+        if not hasattr(f, "__click_params__"):
+            f.__click_params__ = []  # type: ignore
+
+        f.__click_params__.append(param)  # type: ignore
+
+
+def argument(*param_decls: str, **attrs: t.Any) -> t.Callable[[FC], FC]:
+    """Attaches an argument to the command.  All positional arguments are
+    passed as parameter declarations to :class:`Argument`; all keyword
+    arguments are forwarded unchanged (except ``cls``).
+    This is equivalent to creating an :class:`Argument` instance manually
+    and attaching it to the :attr:`Command.params` list.
+
+    :param cls: the argument class to instantiate.  This defaults to
+                :class:`Argument`.
+    """
+
+    def decorator(f: FC) -> FC:
+        ArgumentClass = attrs.pop("cls", None) or Argument
+        _param_memo(f, ArgumentClass(param_decls, **attrs))
+        return f
+
+    return decorator
+
+
+def option(*param_decls: str, **attrs: t.Any) -> t.Callable[[FC], FC]:
+    """Attaches an option to the command.  All positional arguments are
+    passed as parameter declarations to :class:`Option`; all keyword
+    arguments are forwarded unchanged (except ``cls``).
+    This is equivalent to creating an :class:`Option` instance manually
+    and attaching it to the :attr:`Command.params` list.
+
+    :param cls: the option class to instantiate.  This defaults to
+                :class:`Option`.
+    """
+
+    def decorator(f: FC) -> FC:
+        # Issue 926: copy attrs so that pre-defined options can reuse the same cls=.
+        option_attrs = attrs.copy()
+        OptionClass = option_attrs.pop("cls", None) or Option
+        _param_memo(f, OptionClass(param_decls, **option_attrs))
+        return f
+
+    return decorator
+
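+# Stacking sketch (illustrative, not part of the upstream source): decorator
+# order is bottom-up, so parameters read naturally top-down in the help
+# output. The names are hypothetical.
+#
+#   @click.command()
+#   @click.argument("filename")
+#   @click.option("--count", default=1, show_default=True, help="Repetitions.")
+#   def cat(filename, count): ...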
+
+def confirmation_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]:
+    """Add a ``--yes`` option which shows a prompt before continuing if
+    not passed. If the prompt is declined, the program will exit.
+
+    :param param_decls: One or more option names. Defaults to the single
+        value ``"--yes"``.
+    :param kwargs: Extra arguments are passed to :func:`option`.
+    """
+
+    def callback(ctx: Context, param: Parameter, value: bool) -> None:
+        if not value:
+            ctx.abort()
+
+    if not param_decls:
+        param_decls = ("--yes",)
+
+    kwargs.setdefault("is_flag", True)
+    kwargs.setdefault("callback", callback)
+    kwargs.setdefault("expose_value", False)
+    kwargs.setdefault("prompt", "Do you want to continue?")
+    kwargs.setdefault("help", "Confirm the action without prompting.")
+    return option(*param_decls, **kwargs)
+
+
+def password_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]:
+    """Add a ``--password`` option which prompts for a password, hiding
+    input and asking to enter the value again for confirmation.
+
+    :param param_decls: One or more option names. Defaults to the single
+        value ``"--password"``.
+    :param kwargs: Extra arguments are passed to :func:`option`.
+    """
+    if not param_decls:
+        param_decls = ("--password",)
+
+    kwargs.setdefault("prompt", True)
+    kwargs.setdefault("confirmation_prompt", True)
+    kwargs.setdefault("hide_input", True)
+    return option(*param_decls, **kwargs)
+
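+# Usage sketch (illustrative, not part of the upstream source): combining the
+# two convenience options above; ``drop`` is a hypothetical command. The
+# ``--yes`` prompt text is overridden via the ``prompt`` keyword.
+#
+#   @click.command()
+#   @click.confirmation_option(prompt="Drop the database?")
+#   @click.password_option()
+#   def drop(password): ...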
+
+def version_option(
+    version: t.Optional[str] = None,
+    *param_decls: str,
+    package_name: t.Optional[str] = None,
+    prog_name: t.Optional[str] = None,
+    message: t.Optional[str] = None,
+    **kwargs: t.Any,
+) -> t.Callable[[FC], FC]:
+    """Add a ``--version`` option which immediately prints the version
+    number and exits the program.
+
+    If ``version`` is not provided, Click will try to detect it using
+    :func:`importlib.metadata.version` to get the version for the
+    ``package_name``. On Python < 3.8, the ``importlib_metadata``
+    backport must be installed.
+
+    If ``package_name`` is not provided, Click will try to detect it by
+    inspecting the stack frames. This will be used to detect the
+    version, so it must match the name of the installed package.
+
+    :param version: The version number to show. If not provided, Click
+        will try to detect it.
+    :param param_decls: One or more option names. Defaults to the single
+        value ``"--version"``.
+    :param package_name: The package name to detect the version from. If
+        not provided, Click will try to detect it.
+    :param prog_name: The name of the CLI to show in the message. If not
+        provided, it will be detected from the command.
+    :param message: The message to show. The values ``%(prog)s``,
+        ``%(package)s``, and ``%(version)s`` are available. Defaults to
+        ``"%(prog)s, version %(version)s"``.
+    :param kwargs: Extra arguments are passed to :func:`option`.
+    :raise RuntimeError: ``version`` could not be detected.
+
+    .. versionchanged:: 8.0
+        Add the ``package_name`` parameter, and the ``%(package)s``
+        value for messages.
+
+    .. versionchanged:: 8.0
+        Use :mod:`importlib.metadata` instead of ``pkg_resources``. The
+        version is detected based on the package name, not the entry
+        point name. The Python package name must match the installed
+        package name, or be passed with ``package_name=``.
+    """
+    if message is None:
+        message = _("%(prog)s, version %(version)s")
+
+    if version is None and package_name is None:
+        frame = inspect.currentframe()
+        f_back = frame.f_back if frame is not None else None
+        f_globals = f_back.f_globals if f_back is not None else None
+        # break reference cycle
+        # https://docs.python.org/3/library/inspect.html#the-interpreter-stack
+        del frame
+
+        if f_globals is not None:
+            package_name = f_globals.get("__name__")
+
+            if package_name == "__main__":
+                package_name = f_globals.get("__package__")
+
+            if package_name:
+                package_name = package_name.partition(".")[0]
+
+    def callback(ctx: Context, param: Parameter, value: bool) -> None:
+        if not value or ctx.resilient_parsing:
+            return
+
+        nonlocal prog_name
+        nonlocal version
+
+        if prog_name is None:
+            prog_name = ctx.find_root().info_name
+
+        if version is None and package_name is not None:
+            metadata: t.Optional[types.ModuleType]
+
+            try:
+                from importlib import metadata  # type: ignore
+            except ImportError:
+                # Python < 3.8
+                import importlib_metadata as metadata  # type: ignore
+
+            try:
+                version = metadata.version(package_name)  # type: ignore
+            except metadata.PackageNotFoundError:  # type: ignore
+                raise RuntimeError(
+                    f"{package_name!r} is not installed. Try passing"
+                    " 'package_name' instead."
+                ) from None
+
+        if version is None:
+            raise RuntimeError(
+                f"Could not determine the version for {package_name!r} automatically."
+            )
+
+        echo(
+            t.cast(str, message)
+            % {"prog": prog_name, "package": package_name, "version": version},
+            color=ctx.color,
+        )
+        ctx.exit()
+
+    if not param_decls:
+        param_decls = ("--version",)
+
+    kwargs.setdefault("is_flag", True)
+    kwargs.setdefault("expose_value", False)
+    kwargs.setdefault("is_eager", True)
+    kwargs.setdefault("help", _("Show the version and exit."))
+    kwargs["callback"] = callback
+    return option(*param_decls, **kwargs)
+
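+# Usage sketch (illustrative, not part of the upstream source): either pin
+# the version explicitly or let it be detected from an installed
+# distribution. ``mypkg`` is a hypothetical package name.
+#
+#   @click.command()
+#   @click.version_option(package_name="mypkg")  # or version="1.2.3"
+#   def cli(): ...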
+
+def help_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]:
+    """Add a ``--help`` option which immediately prints the help page
+    and exits the program.
+
+    This is usually unnecessary, as the ``--help`` option is added to
+    each command automatically unless ``add_help_option=False`` is
+    passed.
+
+    :param param_decls: One or more option names. Defaults to the single
+        value ``"--help"``.
+    :param kwargs: Extra arguments are passed to :func:`option`.
+    """
+
+    def callback(ctx: Context, param: Parameter, value: bool) -> None:
+        if not value or ctx.resilient_parsing:
+            return
+
+        echo(ctx.get_help(), color=ctx.color)
+        ctx.exit()
+
+    if not param_decls:
+        param_decls = ("--help",)
+
+    kwargs.setdefault("is_flag", True)
+    kwargs.setdefault("expose_value", False)
+    kwargs.setdefault("is_eager", True)
+    kwargs.setdefault("help", _("Show this message and exit."))
+    kwargs["callback"] = callback
+    return option(*param_decls, **kwargs)
diff --git a/venv/lib/python3.9/site-packages/click/exceptions.py b/venv/lib/python3.9/site-packages/click/exceptions.py
new file mode 100644
index 0000000..9e20b3e
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click/exceptions.py
@@ -0,0 +1,287 @@
+import os
+import typing as t
+from gettext import gettext as _
+from gettext import ngettext
+
+from ._compat import get_text_stderr
+from .utils import echo
+
+if t.TYPE_CHECKING:
+    from .core import Context
+    from .core import Parameter
+
+
+def _join_param_hints(
+    param_hint: t.Optional[t.Union[t.Sequence[str], str]]
+) -> t.Optional[str]:
+    if param_hint is not None and not isinstance(param_hint, str):
+        return " / ".join(repr(x) for x in param_hint)
+
+    return param_hint
+
+
+class ClickException(Exception):
+    """An exception that Click can handle and show to the user."""
+
+    #: The exit code for this exception.
+    exit_code = 1
+
+    def __init__(self, message: str) -> None:
+        super().__init__(message)
+        self.message = message
+
+    def format_message(self) -> str:
+        return self.message
+
+    def __str__(self) -> str:
+        return self.message
+
+    def show(self, file: t.Optional[t.IO] = None) -> None:
+        if file is None:
+            file = get_text_stderr()
+
+        echo(_("Error: {message}").format(message=self.format_message()), file=file)
+
+
+class UsageError(ClickException):
+    """An internal exception that signals a usage error.  This typically
+    aborts any further handling.
+
+    :param message: the error message to display.
+    :param ctx: optionally the context that caused this error.  Click will
+                fill in the context automatically in some situations.
+    """
+
+    exit_code = 2
+
+    def __init__(self, message: str, ctx: t.Optional["Context"] = None) -> None:
+        super().__init__(message)
+        self.ctx = ctx
+        self.cmd = self.ctx.command if self.ctx else None
+
+    def show(self, file: t.Optional[t.IO] = None) -> None:
+        if file is None:
+            file = get_text_stderr()
+        color = None
+        hint = ""
+        if (
+            self.ctx is not None
+            and self.ctx.command.get_help_option(self.ctx) is not None
+        ):
+            hint = _("Try '{command} {option}' for help.").format(
+                command=self.ctx.command_path, option=self.ctx.help_option_names[0]
+            )
+            hint = f"{hint}\n"
+        if self.ctx is not None:
+            color = self.ctx.color
+            echo(f"{self.ctx.get_usage()}\n{hint}", file=file, color=color)
+        echo(
+            _("Error: {message}").format(message=self.format_message()),
+            file=file,
+            color=color,
+        )
+
+
+class BadParameter(UsageError):
+    """An exception that formats out a standardized error message for a
+    bad parameter.  This is useful when thrown from a callback or type as
+    Click will attach contextual information to it (for instance, which
+    parameter it is).
+
+    .. versionadded:: 2.0
+
+    :param param: the parameter object that caused this error.  This can
+                  be left out, and Click will attach this info itself
+                  if possible.
+    :param param_hint: a string that shows up as parameter name.  This
+                       can be used as alternative to `param` in cases
+                       where custom validation should happen.  If it is
+                       a string it's used as such; if it's a list, then
+                       each item is quoted and separated.
+    """
+
+    def __init__(
+        self,
+        message: str,
+        ctx: t.Optional["Context"] = None,
+        param: t.Optional["Parameter"] = None,
+        param_hint: t.Optional[str] = None,
+    ) -> None:
+        super().__init__(message, ctx)
+        self.param = param
+        self.param_hint = param_hint
+
+    def format_message(self) -> str:
+        if self.param_hint is not None:
+            param_hint = self.param_hint
+        elif self.param is not None:
+            param_hint = self.param.get_error_hint(self.ctx)  # type: ignore
+        else:
+            return _("Invalid value: {message}").format(message=self.message)
+
+        return _("Invalid value for {param_hint}: {message}").format(
+            param_hint=_join_param_hints(param_hint), message=self.message
+        )
+
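+# Illustrative (hypothetical callback): when raised from a parameter
+# callback, Click fills in ctx/param so the message names the option.
+#
+#     def validate_port(ctx, param, value):
+#         if not 0 < value < 65536:
+#             raise BadParameter("must be between 1 and 65535")
+#         return value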
+
+class MissingParameter(BadParameter):
+    """Raised if click required an option or argument but it was not
+    provided when invoking the script.
+
+    .. versionadded:: 4.0
+
+    :param param_type: a string that indicates the type of the parameter.
+                       The default is to inherit the parameter type from
+                       the given `param`.  Valid values are ``'parameter'``,
+                       ``'option'`` or ``'argument'``.
+    """
+
+    def __init__(
+        self,
+        message: t.Optional[str] = None,
+        ctx: t.Optional["Context"] = None,
+        param: t.Optional["Parameter"] = None,
+        param_hint: t.Optional[str] = None,
+        param_type: t.Optional[str] = None,
+    ) -> None:
+        super().__init__(message or "", ctx, param, param_hint)
+        self.param_type = param_type
+
+    def format_message(self) -> str:
+        if self.param_hint is not None:
+            param_hint: t.Optional[str] = self.param_hint
+        elif self.param is not None:
+            param_hint = self.param.get_error_hint(self.ctx)  # type: ignore
+        else:
+            param_hint = None
+
+        param_hint = _join_param_hints(param_hint)
+        param_hint = f" {param_hint}" if param_hint else ""
+
+        param_type = self.param_type
+        if param_type is None and self.param is not None:
+            param_type = self.param.param_type_name
+
+        msg = self.message
+        if self.param is not None:
+            msg_extra = self.param.type.get_missing_message(self.param)
+            if msg_extra:
+                if msg:
+                    msg += f". {msg_extra}"
+                else:
+                    msg = msg_extra
+
+        msg = f" {msg}" if msg else ""
+
+        # Translate param_type for known types.
+        if param_type == "argument":
+            missing = _("Missing argument")
+        elif param_type == "option":
+            missing = _("Missing option")
+        elif param_type == "parameter":
+            missing = _("Missing parameter")
+        else:
+            missing = _("Missing {param_type}").format(param_type=param_type)
+
+        return f"{missing}{param_hint}.{msg}"
+
+    def __str__(self) -> str:
+        if not self.message:
+            param_name = self.param.name if self.param else None
+            return _("Missing parameter: {param_name}").format(param_name=param_name)
+        else:
+            return self.message
+
+
+class NoSuchOption(UsageError):
+    """Raised if click attempted to handle an option that does not
+    exist.
+
+    .. versionadded:: 4.0
+    """
+
+    def __init__(
+        self,
+        option_name: str,
+        message: t.Optional[str] = None,
+        possibilities: t.Optional[t.Sequence[str]] = None,
+        ctx: t.Optional["Context"] = None,
+    ) -> None:
+        if message is None:
+            message = _("No such option: {name}").format(name=option_name)
+
+        super().__init__(message, ctx)
+        self.option_name = option_name
+        self.possibilities = possibilities
+
+    def format_message(self) -> str:
+        if not self.possibilities:
+            return self.message
+
+        possibility_str = ", ".join(sorted(self.possibilities))
+        suggest = ngettext(
+            "Did you mean {possibility}?",
+            "(Possible options: {possibilities})",
+            len(self.possibilities),
+        ).format(possibility=possibility_str, possibilities=possibility_str)
+        return f"{self.message} {suggest}"
+
+
+class BadOptionUsage(UsageError):
+    """Raised if an option is generally supplied but the use of the option
+    was incorrect.  This is for instance raised if the number of arguments
+    for an option is not correct.
+
+    .. versionadded:: 4.0
+
+    :param option_name: the name of the option being used incorrectly.
+    """
+
+    def __init__(
+        self, option_name: str, message: str, ctx: t.Optional["Context"] = None
+    ) -> None:
+        super().__init__(message, ctx)
+        self.option_name = option_name
+
+
+class BadArgumentUsage(UsageError):
+    """Raised if an argument is generally supplied but the use of the argument
+    was incorrect.  This is for instance raised if the number of values
+    for an argument is not correct.
+
+    .. versionadded:: 6.0
+    """
+
+
+class FileError(ClickException):
+    """Raised if a file cannot be opened."""
+
+    def __init__(self, filename: str, hint: t.Optional[str] = None) -> None:
+        if hint is None:
+            hint = _("unknown error")
+
+        super().__init__(hint)
+        self.ui_filename = os.fsdecode(filename)
+        self.filename = filename
+
+    def format_message(self) -> str:
+        return _("Could not open file {filename!r}: {message}").format(
+            filename=self.ui_filename, message=self.message
+        )
+
+
+class Abort(RuntimeError):
+    """An internal signalling exception that signals Click to abort."""
+
+
+class Exit(RuntimeError):
+    """An exception that indicates that the application should exit with some
+    status code.
+
+    :param code: the status code to exit with.
+    """
+
+    __slots__ = ("exit_code",)
+
+    def __init__(self, code: int = 0) -> None:
+        self.exit_code = code
diff --git a/venv/lib/python3.9/site-packages/click/formatting.py b/venv/lib/python3.9/site-packages/click/formatting.py
new file mode 100644
index 0000000..ddd2a2f
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click/formatting.py
@@ -0,0 +1,301 @@
+import typing as t
+from contextlib import contextmanager
+from gettext import gettext as _
+
+from ._compat import term_len
+from .parser import split_opt
+
+# Can force a width.  This is used by the test system.
+FORCED_WIDTH: t.Optional[int] = None
+
+
+def measure_table(rows: t.Iterable[t.Tuple[str, str]]) -> t.Tuple[int, ...]:
+    widths: t.Dict[int, int] = {}
+
+    for row in rows:
+        for idx, col in enumerate(row):
+            widths[idx] = max(widths.get(idx, 0), term_len(col))
+
+    return tuple(y for x, y in sorted(widths.items()))
+
+
+def iter_rows(
+    rows: t.Iterable[t.Tuple[str, str]], col_count: int
+) -> t.Iterator[t.Tuple[str, ...]]:
+    for row in rows:
+        yield row + ("",) * (col_count - len(row))
+
+
+def wrap_text(
+    text: str,
+    width: int = 78,
+    initial_indent: str = "",
+    subsequent_indent: str = "",
+    preserve_paragraphs: bool = False,
+) -> str:
+    """A helper function that intelligently wraps text.  By default, it
+    assumes that it operates on a single paragraph of text but if the
+    `preserve_paragraphs` parameter is provided it will intelligently
+    handle paragraphs (defined by two empty lines).
+
+    If paragraphs are handled, a paragraph can be prefixed with an empty
+    line containing the ``\\b`` character (``\\x08``) to indicate that
+    no rewrapping should happen in that block.
+
+    :param text: the text that should be rewrapped.
+    :param width: the maximum width for the text.
+    :param initial_indent: the initial indent that should be placed on the
+                           first line as a string.
+    :param subsequent_indent: the indent string that should be placed on
+                              each consecutive line.
+    :param preserve_paragraphs: if this flag is set then the wrapping will
+                                intelligently handle paragraphs.
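+
+    Example (illustrative; ``long_description`` is a placeholder)::
+
+        wrap_text(long_description, width=40, preserve_paragraphs=True)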
+    """
+    from ._textwrap import TextWrapper
+
+    text = text.expandtabs()
+    wrapper = TextWrapper(
+        width,
+        initial_indent=initial_indent,
+        subsequent_indent=subsequent_indent,
+        replace_whitespace=False,
+    )
+    if not preserve_paragraphs:
+        return wrapper.fill(text)
+
+    p: t.List[t.Tuple[int, bool, str]] = []
+    buf: t.List[str] = []
+    indent = None
+
+    def _flush_par() -> None:
+        if not buf:
+            return
+        if buf[0].strip() == "\b":
+            p.append((indent or 0, True, "\n".join(buf[1:])))
+        else:
+            p.append((indent or 0, False, " ".join(buf)))
+        del buf[:]
+
+    for line in text.splitlines():
+        if not line:
+            _flush_par()
+            indent = None
+        else:
+            if indent is None:
+                orig_len = term_len(line)
+                line = line.lstrip()
+                indent = orig_len - term_len(line)
+            buf.append(line)
+    _flush_par()
+
+    rv = []
+    for indent, raw, text in p:
+        with wrapper.extra_indent(" " * indent):
+            if raw:
+                rv.append(wrapper.indent_only(text))
+            else:
+                rv.append(wrapper.fill(text))
+
+    return "\n\n".join(rv)
+
+
+class HelpFormatter:
+    """This class helps with formatting text-based help pages.  It's
+    usually just needed for very special internal cases, but it's also
+    exposed so that developers can write their own fancy outputs.
+
+    At present, it always writes into memory.
+
+    :param indent_increment: the additional increment for each level.
+    :param width: the width for the text.  This defaults to the terminal
+                  width clamped to a maximum of 78.
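+
+    Example (illustrative)::
+
+        formatter = HelpFormatter()
+        with formatter.section("Options"):
+            formatter.write_dl([("--count", "number of greetings")])
+        help_text = formatter.getvalue()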
+    """
+
+    def __init__(
+        self,
+        indent_increment: int = 2,
+        width: t.Optional[int] = None,
+        max_width: t.Optional[int] = None,
+    ) -> None:
+        import shutil
+
+        self.indent_increment = indent_increment
+        if max_width is None:
+            max_width = 80
+        if width is None:
+            width = FORCED_WIDTH
+            if width is None:
+                width = max(min(shutil.get_terminal_size().columns, max_width) - 2, 50)
+        self.width = width
+        self.current_indent = 0
+        self.buffer: t.List[str] = []
+
+    def write(self, string: str) -> None:
+        """Writes a unicode string into the internal buffer."""
+        self.buffer.append(string)
+
+    def indent(self) -> None:
+        """Increases the indentation."""
+        self.current_indent += self.indent_increment
+
+    def dedent(self) -> None:
+        """Decreases the indentation."""
+        self.current_indent -= self.indent_increment
+
+    def write_usage(
+        self, prog: str, args: str = "", prefix: t.Optional[str] = None
+    ) -> None:
+        """Writes a usage line into the buffer.
+
+        :param prog: the program name.
+        :param args: whitespace separated list of arguments.
+        :param prefix: The prefix for the first line. Defaults to
+            ``"Usage: "``.
+        """
+        if prefix is None:
+            prefix = f"{_('Usage:')} "
+
+        usage_prefix = f"{prefix:>{self.current_indent}}{prog} "
+        text_width = self.width - self.current_indent
+
+        if text_width >= (term_len(usage_prefix) + 20):
+            # The arguments will fit to the right of the prefix.
+            indent = " " * term_len(usage_prefix)
+            self.write(
+                wrap_text(
+                    args,
+                    text_width,
+                    initial_indent=usage_prefix,
+                    subsequent_indent=indent,
+                )
+            )
+        else:
+            # The prefix is too long, put the arguments on the next line.
+            self.write(usage_prefix)
+            self.write("\n")
+            indent = " " * (max(self.current_indent, term_len(prefix)) + 4)
+            self.write(
+                wrap_text(
+                    args, text_width, initial_indent=indent, subsequent_indent=indent
+                )
+            )
+
+        self.write("\n")
+
+    def write_heading(self, heading: str) -> None:
+        """Writes a heading into the buffer."""
+        self.write(f"{'':>{self.current_indent}}{heading}:\n")
+
+    def write_paragraph(self) -> None:
+        """Writes a paragraph into the buffer."""
+        if self.buffer:
+            self.write("\n")
+
+    def write_text(self, text: str) -> None:
+        """Writes re-indented text into the buffer.  This rewraps and
+        preserves paragraphs.
+        """
+        indent = " " * self.current_indent
+        self.write(
+            wrap_text(
+                text,
+                self.width,
+                initial_indent=indent,
+                subsequent_indent=indent,
+                preserve_paragraphs=True,
+            )
+        )
+        self.write("\n")
+
+    def write_dl(
+        self,
+        rows: t.Sequence[t.Tuple[str, str]],
+        col_max: int = 30,
+        col_spacing: int = 2,
+    ) -> None:
+        """Writes a definition list into the buffer.  This is how options
+        and commands are usually formatted.
+
+        :param rows: a list of two item tuples for the terms and values.
+        :param col_max: the maximum width of the first column.
+        :param col_spacing: the number of spaces between the first and
+                            second column.
+        """
+        rows = list(rows)
+        widths = measure_table(rows)
+        if len(widths) != 2:
+            raise TypeError("Expected two columns for definition list")
+
+        first_col = min(widths[0], col_max) + col_spacing
+
+        for first, second in iter_rows(rows, len(widths)):
+            self.write(f"{'':>{self.current_indent}}{first}")
+            if not second:
+                self.write("\n")
+                continue
+            if term_len(first) <= first_col - col_spacing:
+                self.write(" " * (first_col - term_len(first)))
+            else:
+                self.write("\n")
+                self.write(" " * (first_col + self.current_indent))
+
+            text_width = max(self.width - first_col - 2, 10)
+            wrapped_text = wrap_text(second, text_width, preserve_paragraphs=True)
+            lines = wrapped_text.splitlines()
+
+            if lines:
+                self.write(f"{lines[0]}\n")
+
+                for line in lines[1:]:
+                    self.write(f"{'':>{first_col + self.current_indent}}{line}\n")
+            else:
+                self.write("\n")
+
+    @contextmanager
+    def section(self, name: str) -> t.Iterator[None]:
+        """Helpful context manager that writes a paragraph, a heading,
+        and the indents.
+
+        :param name: the section name that is written as heading.
+        """
+        self.write_paragraph()
+        self.write_heading(name)
+        self.indent()
+        try:
+            yield
+        finally:
+            self.dedent()
+
+    @contextmanager
+    def indentation(self) -> t.Iterator[None]:
+        """A context manager that increases the indentation."""
+        self.indent()
+        try:
+            yield
+        finally:
+            self.dedent()
+
+    def getvalue(self) -> str:
+        """Returns the buffer contents."""
+        return "".join(self.buffer)
+
+
+def join_options(options: t.Sequence[str]) -> t.Tuple[str, bool]:
+    """Given a list of option strings this joins them in the most appropriate
+    way and returns them in the form ``(formatted_string,
+    any_prefix_is_slash)`` where the second item in the tuple is a flag that
+    indicates if any of the option prefixes was a slash.
+    """
+    rv = []
+    any_prefix_is_slash = False
+
+    for opt in options:
+        prefix = split_opt(opt)[0]
+
+        if prefix == "/":
+            any_prefix_is_slash = True
+
+        rv.append((len(prefix), opt))
+
+    rv.sort(key=lambda x: x[0])
+    return ", ".join(x[1] for x in rv), any_prefix_is_slash
diff --git a/venv/lib/python3.9/site-packages/click/globals.py b/venv/lib/python3.9/site-packages/click/globals.py
new file mode 100644
index 0000000..480058f
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click/globals.py
@@ -0,0 +1,68 @@
+import typing as t
+from threading import local
+
+if t.TYPE_CHECKING:
+    import typing_extensions as te
+    from .core import Context
+
+_local = local()
+
+
+@t.overload
+def get_current_context(silent: "te.Literal[False]" = False) -> "Context":
+    ...
+
+
+@t.overload
+def get_current_context(silent: bool = ...) -> t.Optional["Context"]:
+    ...
+
+
+def get_current_context(silent: bool = False) -> t.Optional["Context"]:
+    """Returns the current click context.  This can be used as a way to
+    access the current context object from anywhere.  This is a more implicit
+    alternative to the :func:`pass_context` decorator.  This function is
+    primarily useful for helpers such as :func:`echo` which might be
+    interested in changing its behavior based on the current context.
+
+    To push the current context, :meth:`Context.scope` can be used.
+
+    .. versionadded:: 5.0
+
+    :param silent: if set to `True` the return value is `None` if no context
+                   is available.  The default behavior is to raise a
+                   :exc:`RuntimeError`.
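+
+    Example (illustrative; ``echo`` is Click's own helper)::
+
+        def log(message):
+            ctx = get_current_context(silent=True)
+            prefix = f"{ctx.info_name}: " if ctx is not None else ""
+            echo(prefix + message)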
+    """
+    try:
+        return t.cast("Context", _local.stack[-1])
+    except (AttributeError, IndexError) as e:
+        if not silent:
+            raise RuntimeError("There is no active click context.") from e
+
+    return None
+
+
+def push_context(ctx: "Context") -> None:
+    """Pushes a new context to the current stack."""
+    _local.__dict__.setdefault("stack", []).append(ctx)
+
+
+def pop_context() -> None:
+    """Removes the top level from the stack."""
+    _local.stack.pop()
+
+
+def resolve_color_default(color: t.Optional[bool] = None) -> t.Optional[bool]:
+    """Internal helper to get the default value of the color flag.  If a
+    value is passed it's returned unchanged, otherwise it's looked up from
+    the current context.
+    """
+    if color is not None:
+        return color
+
+    ctx = get_current_context(silent=True)
+
+    if ctx is not None:
+        return ctx.color
+
+    return None
diff --git a/venv/lib/python3.9/site-packages/click/parser.py b/venv/lib/python3.9/site-packages/click/parser.py
new file mode 100644
index 0000000..2d5a2ed
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click/parser.py
@@ -0,0 +1,529 @@
+"""
+This module started out largely as a copy-paste of the stdlib's
+optparse module, with the features removed that we do not need
+because Click implements them at a higher level (for instance type
+handling, help formatting, and a lot more).
+The plan is to remove more and more from here over time.
+
+The reason this is a separate module rather than the stdlib's optparse
+is that the generated error messages differed between Python 2.x and
+3.x, and that optparse in the stdlib uses gettext for no good reason,
+which might cause us issues.
+
+Click uses parts of optparse written by Gregory P. Ward and maintained
+by the Python Software Foundation. This is limited to code in parser.py.
+
+Copyright 2001-2006 Gregory P. Ward. All rights reserved.
+Copyright 2002-2006 Python Software Foundation. All rights reserved.
+"""
+# This code uses parts of optparse written by Gregory P. Ward and
+# maintained by the Python Software Foundation.
+# Copyright 2001-2006 Gregory P. Ward
+# Copyright 2002-2006 Python Software Foundation
+import typing as t
+from collections import deque
+from gettext import gettext as _
+from gettext import ngettext
+
+from .exceptions import BadArgumentUsage
+from .exceptions import BadOptionUsage
+from .exceptions import NoSuchOption
+from .exceptions import UsageError
+
+if t.TYPE_CHECKING:
+    import typing_extensions as te
+    from .core import Argument as CoreArgument
+    from .core import Context
+    from .core import Option as CoreOption
+    from .core import Parameter as CoreParameter
+
+V = t.TypeVar("V")
+
+# Sentinel value that indicates an option was passed as a flag without a
+# value but is not a flag option. Option.consume_value uses this to
+# prompt or use the flag_value.
+_flag_needs_value = object()
+
+
+def _unpack_args(
+    args: t.Sequence[str], nargs_spec: t.Sequence[int]
+) -> t.Tuple[t.Sequence[t.Union[str, t.Sequence[t.Optional[str]], None]], t.List[str]]:
+    """Given an iterable of arguments and an iterable of nargs specifications,
+    it returns a tuple with all the unpacked arguments at the first index
+    and all remaining arguments as the second.
+
+    The nargs specification is the number of arguments that should be consumed
+    or `-1` to indicate that this position should eat up all the remainders.
+
+    Missing items are filled with `None`.
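+
+    Example (illustrative)::
+
+        _unpack_args(["a", "b", "c"], [1, -1])
+        # -> (("a", ("b", "c")), [])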
+    """
+    args = deque(args)
+    nargs_spec = deque(nargs_spec)
+    rv: t.List[t.Union[str, t.Tuple[t.Optional[str], ...], None]] = []
+    spos: t.Optional[int] = None
+
+    def _fetch(c: "te.Deque[V]") -> t.Optional[V]:
+        try:
+            if spos is None:
+                return c.popleft()
+            else:
+                return c.pop()
+        except IndexError:
+            return None
+
+    while nargs_spec:
+        nargs = _fetch(nargs_spec)
+
+        if nargs is None:
+            continue
+
+        if nargs == 1:
+            rv.append(_fetch(args))
+        elif nargs > 1:
+            x = [_fetch(args) for _ in range(nargs)]
+
+            # If we're reversed, we're pulling in the arguments in reverse,
+            # so we need to turn them around.
+            if spos is not None:
+                x.reverse()
+
+            rv.append(tuple(x))
+        elif nargs < 0:
+            if spos is not None:
+                raise TypeError("Cannot have two nargs < 0")
+
+            spos = len(rv)
+            rv.append(None)
+
+    # spos is the position of the wildcard (star).  If it's not `None`,
+    # we fill it with the remainder.
+    if spos is not None:
+        rv[spos] = tuple(args)
+        args = []
+        rv[spos + 1 :] = reversed(rv[spos + 1 :])
+
+    return tuple(rv), list(args)
+
+
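+# Splits an option string into ``(prefix, name)``; illustrative examples:
+#   split_opt("--foo") -> ("--", "foo")
+#   split_opt("-f")    -> ("-", "f")
+#   split_opt("foo")   -> ("", "foo")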
+def split_opt(opt: str) -> t.Tuple[str, str]:
+    first = opt[:1]
+    if first.isalnum():
+        return "", opt
+    if opt[1:2] == first:
+        return opt[:2], opt[2:]
+    return first, opt[1:]
+
+
+def normalize_opt(opt: str, ctx: t.Optional["Context"]) -> str:
+    if ctx is None or ctx.token_normalize_func is None:
+        return opt
+    prefix, opt = split_opt(opt)
+    return f"{prefix}{ctx.token_normalize_func(opt)}"
+
+
+def split_arg_string(string: str) -> t.List[str]:
+    """Split an argument string as with :func:`shlex.split`, but don't
+    fail if the string is incomplete. Ignores a missing closing quote or
+    incomplete escape sequence and uses the partial token as-is.
+
+    .. code-block:: python
+
+        split_arg_string("example 'my file")
+        ["example", "my file"]
+
+        split_arg_string("example my\\")
+        ["example", "my"]
+
+    :param string: String to split.
+    """
+    import shlex
+
+    lex = shlex.shlex(string, posix=True)
+    lex.whitespace_split = True
+    lex.commenters = ""
+    out = []
+
+    try:
+        for token in lex:
+            out.append(token)
+    except ValueError:
+        # Raised when end-of-string is reached in an invalid state. Use
+        # the partial token as-is. The quote or escape character is in
+        # lex.state, not lex.token.
+        out.append(lex.token)
+
+    return out
+
+
+class Option:
+    def __init__(
+        self,
+        obj: "CoreOption",
+        opts: t.Sequence[str],
+        dest: t.Optional[str],
+        action: t.Optional[str] = None,
+        nargs: int = 1,
+        const: t.Optional[t.Any] = None,
+    ):
+        self._short_opts = []
+        self._long_opts = []
+        self.prefixes = set()
+
+        for opt in opts:
+            prefix, value = split_opt(opt)
+            if not prefix:
+                raise ValueError(f"Invalid start character for option ({opt})")
+            self.prefixes.add(prefix[0])
+            if len(prefix) == 1 and len(value) == 1:
+                self._short_opts.append(opt)
+            else:
+                self._long_opts.append(opt)
+                self.prefixes.add(prefix)
+
+        if action is None:
+            action = "store"
+
+        self.dest = dest
+        self.action = action
+        self.nargs = nargs
+        self.const = const
+        self.obj = obj
+
+    @property
+    def takes_value(self) -> bool:
+        return self.action in ("store", "append")
+
+    def process(self, value: str, state: "ParsingState") -> None:
+        if self.action == "store":
+            state.opts[self.dest] = value  # type: ignore
+        elif self.action == "store_const":
+            state.opts[self.dest] = self.const  # type: ignore
+        elif self.action == "append":
+            state.opts.setdefault(self.dest, []).append(value)  # type: ignore
+        elif self.action == "append_const":
+            state.opts.setdefault(self.dest, []).append(self.const)  # type: ignore
+        elif self.action == "count":
+            state.opts[self.dest] = state.opts.get(self.dest, 0) + 1  # type: ignore
+        else:
+            raise ValueError(f"unknown action '{self.action}'")
+        state.order.append(self.obj)
+
+
+class Argument:
+    def __init__(self, obj: "CoreArgument", dest: t.Optional[str], nargs: int = 1):
+        self.dest = dest
+        self.nargs = nargs
+        self.obj = obj
+
+    def process(
+        self,
+        value: t.Union[t.Optional[str], t.Sequence[t.Optional[str]]],
+        state: "ParsingState",
+    ) -> None:
+        if self.nargs > 1:
+            assert value is not None
+            holes = sum(1 for x in value if x is None)
+            if holes == len(value):
+                value = None
+            elif holes != 0:
+                raise BadArgumentUsage(
+                    _("Argument {name!r} takes {nargs} values.").format(
+                        name=self.dest, nargs=self.nargs
+                    )
+                )
+
+        if self.nargs == -1 and self.obj.envvar is not None and value == ():
+            # Replace empty tuple with None so that a value from the
+            # environment may be tried.
+            value = None
+
+        state.opts[self.dest] = value  # type: ignore
+        state.order.append(self.obj)
+
+
+class ParsingState:
+    def __init__(self, rargs: t.List[str]) -> None:
+        self.opts: t.Dict[str, t.Any] = {}
+        self.largs: t.List[str] = []
+        self.rargs = rargs
+        self.order: t.List["CoreParameter"] = []
+
+
+class OptionParser:
+    """The option parser is an internal class that is ultimately used to
+    parse options and arguments.  It's modelled after optparse and brings
+    a similar but vastly simplified API.  It should generally not be used
+    directly as the high level Click classes wrap it for you.
+
+    It's not nearly as extensible as optparse or argparse because it
+    does not implement features that are implemented on a higher level
+    (such as types or defaults).
+
+    :param ctx: optionally the :class:`~click.Context` that this parser
+                should be associated with.
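+
+    Illustrative sketch (normally driven by :class:`~click.Command`;
+    ``opt_obj`` stands for the owning :class:`~click.Option`)::
+
+        parser = OptionParser()
+        parser.add_option(opt_obj, ["--name"], "name")
+        opts, largs, order = parser.parse_args(["--name", "x"])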
+    """
+
+    def __init__(self, ctx: t.Optional["Context"] = None) -> None:
+        #: The :class:`~click.Context` for this parser.  This might be
+        #: `None` for some advanced use cases.
+        self.ctx = ctx
+        #: This controls how the parser deals with interspersed arguments.
+        #: If this is set to `False`, the parser will stop on the first
+        #: non-option.  Click uses this to implement nested subcommands
+        #: safely.
+        self.allow_interspersed_args = True
+        #: This tells the parser how to deal with unknown options.  By
+        #: default it will error out (which is sensible), but there is a
+        #: second mode where it will ignore it and continue processing
+        #: after shifting all the unknown options into the resulting args.
+        self.ignore_unknown_options = False
+
+        if ctx is not None:
+            self.allow_interspersed_args = ctx.allow_interspersed_args
+            self.ignore_unknown_options = ctx.ignore_unknown_options
+
+        self._short_opt: t.Dict[str, Option] = {}
+        self._long_opt: t.Dict[str, Option] = {}
+        self._opt_prefixes = {"-", "--"}
+        self._args: t.List[Argument] = []
+
+    def add_option(
+        self,
+        obj: "CoreOption",
+        opts: t.Sequence[str],
+        dest: t.Optional[str],
+        action: t.Optional[str] = None,
+        nargs: int = 1,
+        const: t.Optional[t.Any] = None,
+    ) -> None:
+        """Adds a new option named `dest` to the parser.  The destination
+        is not inferred (unlike with optparse) and needs to be explicitly
+        provided.  Action can be any of ``store``, ``store_const``,
+        ``append``, ``append_const`` or ``count``.
+
+        The `obj` can be used to identify the option in the order list
+        that is returned from the parser.
+        """
+        opts = [normalize_opt(opt, self.ctx) for opt in opts]
+        option = Option(obj, opts, dest, action=action, nargs=nargs, const=const)
+        self._opt_prefixes.update(option.prefixes)
+        for opt in option._short_opts:
+            self._short_opt[opt] = option
+        for opt in option._long_opts:
+            self._long_opt[opt] = option
+
+    def add_argument(
+        self, obj: "CoreArgument", dest: t.Optional[str], nargs: int = 1
+    ) -> None:
+        """Adds a positional argument named `dest` to the parser.
+
+        The `obj` can be used to identify the option in the order list
+        that is returned from the parser.
+        """
+        self._args.append(Argument(obj, dest=dest, nargs=nargs))
+
+    def parse_args(
+        self, args: t.List[str]
+    ) -> t.Tuple[t.Dict[str, t.Any], t.List[str], t.List["CoreParameter"]]:
+        """Parses positional arguments and returns ``(values, args, order)``
+        for the parsed options and arguments as well as the leftover
+        arguments if there are any.  The order is a list of objects as they
+        appear on the command line.  If arguments appear multiple times they
+        will be memorized multiple times as well.
+        """
+        state = ParsingState(args)
+        try:
+            self._process_args_for_options(state)
+            self._process_args_for_args(state)
+        except UsageError:
+            if self.ctx is None or not self.ctx.resilient_parsing:
+                raise
+        return state.opts, state.largs, state.order
+
+    def _process_args_for_args(self, state: ParsingState) -> None:
+        pargs, args = _unpack_args(
+            state.largs + state.rargs, [x.nargs for x in self._args]
+        )
+
+        for idx, arg in enumerate(self._args):
+            arg.process(pargs[idx], state)
+
+        state.largs = args
+        state.rargs = []
+
+    def _process_args_for_options(self, state: ParsingState) -> None:
+        while state.rargs:
+            arg = state.rargs.pop(0)
+            arglen = len(arg)
+            # Double dashes always handled explicitly regardless of what
+            # prefixes are valid.
+            if arg == "--":
+                return
+            elif arg[:1] in self._opt_prefixes and arglen > 1:
+                self._process_opts(arg, state)
+            elif self.allow_interspersed_args:
+                state.largs.append(arg)
+            else:
+                state.rargs.insert(0, arg)
+                return
+
+        # Say this is the original argument list:
+        # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)]
+        #                            ^
+        # (we are about to process arg(i)).
+        #
+        # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of
+        # [arg0, ..., arg(i-1)] (any options and their arguments will have
+        # been removed from largs).
+        #
+        # The while loop will usually consume 1 or more arguments per pass.
+        # If it consumes 1 (eg. arg is an option that takes no arguments),
+        # then after _process_arg() is done the situation is:
+        #
+        #   largs = subset of [arg0, ..., arg(i)]
+        #   rargs = [arg(i+1), ..., arg(N-1)]
+        #
+        # If allow_interspersed_args is false, largs will always be
+        # *empty* -- still a subset of [arg0, ..., arg(i-1)], but
+        # not a very interesting subset!
+
+    def _match_long_opt(
+        self, opt: str, explicit_value: t.Optional[str], state: ParsingState
+    ) -> None:
+        if opt not in self._long_opt:
+            from difflib import get_close_matches
+
+            possibilities = get_close_matches(opt, self._long_opt)
+            raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx)
+
+        option = self._long_opt[opt]
+        if option.takes_value:
+            # At this point it's safe to modify rargs by injecting the
+            # explicit value, because no exception is raised in this
+            # branch.  This means that the inserted value will be fully
+            # consumed.
+            if explicit_value is not None:
+                state.rargs.insert(0, explicit_value)
+
+            value = self._get_value_from_state(opt, option, state)
+
+        elif explicit_value is not None:
+            raise BadOptionUsage(
+                opt, _("Option {name!r} does not take a value.").format(name=opt)
+            )
+
+        else:
+            value = None
+
+        option.process(value, state)
+
+    def _match_short_opt(self, arg: str, state: ParsingState) -> None:
+        stop = False
+        i = 1
+        prefix = arg[0]
+        unknown_options = []
+
+        for ch in arg[1:]:
+            opt = normalize_opt(f"{prefix}{ch}", self.ctx)
+            option = self._short_opt.get(opt)
+            i += 1
+
+            if not option:
+                if self.ignore_unknown_options:
+                    unknown_options.append(ch)
+                    continue
+                raise NoSuchOption(opt, ctx=self.ctx)
+            if option.takes_value:
+                # Any characters left in arg?  Pretend they're the
+                # next arg, and stop consuming characters of arg.
+                if i < len(arg):
+                    state.rargs.insert(0, arg[i:])
+                    stop = True
+
+                value = self._get_value_from_state(opt, option, state)
+
+            else:
+                value = None
+
+            option.process(value, state)
+
+            if stop:
+                break
+
+        # If we got any unknown options, we recombine the remaining
+        # option characters into one string, re-attach the prefix, and
+        # report it to the state as a new larg.  This keeps basic option
+        # grouping working while unknown options are ignored.
+        if self.ignore_unknown_options and unknown_options:
+            state.largs.append(f"{prefix}{''.join(unknown_options)}")
+
+    def _get_value_from_state(
+        self, option_name: str, option: Option, state: ParsingState
+    ) -> t.Any:
+        nargs = option.nargs
+
+        if len(state.rargs) < nargs:
+            if option.obj._flag_needs_value:
+                # Option allows omitting the value.
+                value = _flag_needs_value
+            else:
+                raise BadOptionUsage(
+                    option_name,
+                    ngettext(
+                        "Option {name!r} requires an argument.",
+                        "Option {name!r} requires {nargs} arguments.",
+                        nargs,
+                    ).format(name=option_name, nargs=nargs),
+                )
+        elif nargs == 1:
+            next_rarg = state.rargs[0]
+
+            if (
+                option.obj._flag_needs_value
+                and isinstance(next_rarg, str)
+                and next_rarg[:1] in self._opt_prefixes
+                and len(next_rarg) > 1
+            ):
+                # The next arg looks like the start of an option, don't
+                # use it as the value if omitting the value is allowed.
+                value = _flag_needs_value
+            else:
+                value = state.rargs.pop(0)
+        else:
+            value = tuple(state.rargs[:nargs])
+            del state.rargs[:nargs]
+
+        return value
+
+    def _process_opts(self, arg: str, state: ParsingState) -> None:
+        explicit_value = None
+        # Long option handling happens in two parts.  The first part is
+        # supporting explicitly attached values.  In any case, we will try
+        # to long match the option first.
+        if "=" in arg:
+            long_opt, explicit_value = arg.split("=", 1)
+        else:
+            long_opt = arg
+        norm_long_opt = normalize_opt(long_opt, self.ctx)
+
+        # At this point we will match the (assumed) long option through
+        # the long option matching code.  Note that this allows options
+        # like "-foo" to be matched as long options.
+        try:
+            self._match_long_opt(norm_long_opt, explicit_value, state)
+        except NoSuchOption:
+            # At this point the long option matching failed, and we need
+            # to try with short options.  However there is a special rule
+            # which says, that if we have a two character options prefix
+            # (applies to "--foo" for instance), we do not dispatch to the
+            # short option code and will instead raise the no option
+            # error.
+            if arg[:2] not in self._opt_prefixes:
+                self._match_short_opt(arg, state)
+                return
+
+            if not self.ignore_unknown_options:
+                raise
+
+            state.largs.append(arg)
diff --git a/venv/lib/python3.9/site-packages/click/py.typed b/venv/lib/python3.9/site-packages/click/py.typed
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click/py.typed
diff --git a/venv/lib/python3.9/site-packages/click/shell_completion.py b/venv/lib/python3.9/site-packages/click/shell_completion.py
new file mode 100644
index 0000000..c17a8e6
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click/shell_completion.py
@@ -0,0 +1,580 @@
+import os
+import re
+import typing as t
+from gettext import gettext as _
+
+from .core import Argument
+from .core import BaseCommand
+from .core import Context
+from .core import MultiCommand
+from .core import Option
+from .core import Parameter
+from .core import ParameterSource
+from .parser import split_arg_string
+from .utils import echo
+
+
+def shell_complete(
+    cli: BaseCommand,
+    ctx_args: t.Dict[str, t.Any],
+    prog_name: str,
+    complete_var: str,
+    instruction: str,
+) -> int:
+    """Perform shell completion for the given CLI program.
+
+    :param cli: Command being called.
+    :param ctx_args: Extra arguments to pass to
+        ``cli.make_context``.
+    :param prog_name: Name of the executable in the shell.
+    :param complete_var: Name of the environment variable that holds
+        the completion instruction.
+    :param instruction: Value of ``complete_var`` with the completion
+        instruction and shell, in the form ``instruction_shell``.
+    :return: Status code to exit with.
+    """
+    shell, _, instruction = instruction.partition("_")
+    comp_cls = get_completion_class(shell)
+
+    if comp_cls is None:
+        return 1
+
+    comp = comp_cls(cli, ctx_args, prog_name, complete_var)
+
+    if instruction == "source":
+        echo(comp.source())
+        return 0
+
+    if instruction == "complete":
+        echo(comp.complete())
+        return 0
+
+    return 1
+
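+# Illustrative note (matches Click's documented pattern): for a program
+# installed as "mycli", completion is activated by evaluating the source
+# script, e.g. in Bash:  eval "$(_MYCLI_COMPLETE=bash_source mycli)"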
+
+class CompletionItem:
+    """Represents a completion value and metadata about the value. The
+    default metadata is ``type`` to indicate special shell handling,
+    and ``help`` if a shell supports showing a help string next to the
+    value.
+
+    Arbitrary parameters can be passed when creating the object, and
+    accessed using ``item.attr``. If an attribute wasn't passed,
+    accessing it returns ``None``.
+
+    :param value: The completion suggestion.
+    :param type: Tells the shell script to provide special completion
+        support for the type. Click uses ``"dir"`` and ``"file"``.
+    :param help: String shown next to the value if supported.
+    :param kwargs: Arbitrary metadata. The built-in implementations
+        don't use this, but custom type completions paired with custom
+        shell support could use it.
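+
+    Example (illustrative)::
+
+        CompletionItem("build", help="Build the project")
+        CompletionItem("./docs", type="dir")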
+    """
+
+    __slots__ = ("value", "type", "help", "_info")
+
+    def __init__(
+        self,
+        value: t.Any,
+        type: str = "plain",
+        help: t.Optional[str] = None,
+        **kwargs: t.Any,
+    ) -> None:
+        self.value = value
+        self.type = type
+        self.help = help
+        self._info = kwargs
+
+    def __getattr__(self, name: str) -> t.Any:
+        return self._info.get(name)
+
+
+# Only Bash >= 4.4 has the nosort option.
+_SOURCE_BASH = """\
+%(complete_func)s() {
+    local IFS=$'\\n'
+    local response
+
+    response=$(env COMP_WORDS="${COMP_WORDS[*]}" COMP_CWORD=$COMP_CWORD \
+%(complete_var)s=bash_complete $1)
+
+    for completion in $response; do
+        IFS=',' read type value <<< "$completion"
+
+        if [[ $type == 'dir' ]]; then
+            COMPREPLY=()
+            compopt -o dirnames
+        elif [[ $type == 'file' ]]; then
+            COMPREPLY=()
+            compopt -o default
+        elif [[ $type == 'plain' ]]; then
+            COMPREPLY+=($value)
+        fi
+    done
+
+    return 0
+}
+
+%(complete_func)s_setup() {
+    complete -o nosort -F %(complete_func)s %(prog_name)s
+}
+
+%(complete_func)s_setup;
+"""
+
+_SOURCE_ZSH = """\
+#compdef %(prog_name)s
+
+%(complete_func)s() {
+    local -a completions
+    local -a completions_with_descriptions
+    local -a response
+    (( ! $+commands[%(prog_name)s] )) && return 1
+
+    response=("${(@f)$(env COMP_WORDS="${words[*]}" COMP_CWORD=$((CURRENT-1)) \
+%(complete_var)s=zsh_complete %(prog_name)s)}")
+
+    for type key descr in ${response}; do
+        if [[ "$type" == "plain" ]]; then
+            if [[ "$descr" == "_" ]]; then
+                completions+=("$key")
+            else
+                completions_with_descriptions+=("$key":"$descr")
+            fi
+        elif [[ "$type" == "dir" ]]; then
+            _path_files -/
+        elif [[ "$type" == "file" ]]; then
+            _path_files -f
+        fi
+    done
+
+    if [ -n "$completions_with_descriptions" ]; then
+        _describe -V unsorted completions_with_descriptions -U
+    fi
+
+    if [ -n "$completions" ]; then
+        compadd -U -V unsorted -a completions
+    fi
+}
+
+compdef %(complete_func)s %(prog_name)s;
+"""
+
+_SOURCE_FISH = """\
+function %(complete_func)s;
+    set -l response;
+
+    for value in (env %(complete_var)s=fish_complete COMP_WORDS=(commandline -cp) \
+COMP_CWORD=(commandline -t) %(prog_name)s);
+        set response $response $value;
+    end;
+
+    for completion in $response;
+        set -l metadata (string split "," $completion);
+
+        if test $metadata[1] = "dir";
+            __fish_complete_directories $metadata[2];
+        else if test $metadata[1] = "file";
+            __fish_complete_path $metadata[2];
+        else if test $metadata[1] = "plain";
+            echo $metadata[2];
+        end;
+    end;
+end;
+
+complete --no-files --command %(prog_name)s --arguments \
+"(%(complete_func)s)";
+"""
+
+
+class ShellComplete:
+    """Base class for providing shell completion support. A subclass for
+    a given shell will override attributes and methods to implement the
+    completion instructions (``source`` and ``complete``).
+
+    :param cli: Command being called.
+    :param prog_name: Name of the executable in the shell.
+    :param complete_var: Name of the environment variable that holds
+        the completion instruction.
+
+    .. versionadded:: 8.0
+    """
+
+    name: t.ClassVar[str]
+    """Name to register the shell as with :func:`add_completion_class`.
+    This is used in completion instructions (``{name}_source`` and
+    ``{name}_complete``).
+    """
+
+    source_template: t.ClassVar[str]
+    """Completion script template formatted by :meth:`source`. This must
+    be provided by subclasses.
+    """
+
+    def __init__(
+        self,
+        cli: BaseCommand,
+        ctx_args: t.Dict[str, t.Any],
+        prog_name: str,
+        complete_var: str,
+    ) -> None:
+        self.cli = cli
+        self.ctx_args = ctx_args
+        self.prog_name = prog_name
+        self.complete_var = complete_var
+
+    @property
+    def func_name(self) -> str:
+        """The name of the shell function defined by the completion
+        script.
+        """
+        # ``flags`` must be passed by keyword; the fourth positional
+        # argument of ``re.sub`` is ``count``, not ``flags``.
+        safe_name = re.sub(
+            r"\W*", "", self.prog_name.replace("-", "_"), flags=re.ASCII
+        )
+        return f"_{safe_name}_completion"
+
+    def source_vars(self) -> t.Dict[str, t.Any]:
+        """Vars for formatting :attr:`source_template`.
+
+        By default this provides ``complete_func``, ``complete_var``,
+        and ``prog_name``.
+        """
+        return {
+            "complete_func": self.func_name,
+            "complete_var": self.complete_var,
+            "prog_name": self.prog_name,
+        }
+
+    def source(self) -> str:
+        """Produce the shell script that defines the completion
+        function. By default this ``%``-style formats
+        :attr:`source_template` with the dict returned by
+        :meth:`source_vars`.
+        """
+        return self.source_template % self.source_vars()
+
+    def get_completion_args(self) -> t.Tuple[t.List[str], str]:
+        """Use the env vars defined by the shell script to return a
+        tuple of ``args, incomplete``. This must be implemented by
+        subclasses.
+        """
+        raise NotImplementedError
+
+    def get_completions(
+        self, args: t.List[str], incomplete: str
+    ) -> t.List[CompletionItem]:
+        """Determine the context and last complete command or parameter
+        from the complete args. Call that object's ``shell_complete``
+        method to get the completions for the incomplete value.
+
+        :param args: List of complete args before the incomplete value.
+        :param incomplete: Value being completed. May be empty.
+        """
+        ctx = _resolve_context(self.cli, self.ctx_args, self.prog_name, args)
+        obj, incomplete = _resolve_incomplete(ctx, args, incomplete)
+        return obj.shell_complete(ctx, incomplete)
+
+    def format_completion(self, item: CompletionItem) -> str:
+        """Format a completion item into the form recognized by the
+        shell script. This must be implemented by subclasses.
+
+        :param item: Completion item to format.
+        """
+        raise NotImplementedError
+
+    def complete(self) -> str:
+        """Produce the completion data to send back to the shell.
+
+        By default this calls :meth:`get_completion_args`, gets the
+        completions, then calls :meth:`format_completion` for each
+        completion.
+        """
+        args, incomplete = self.get_completion_args()
+        completions = self.get_completions(args, incomplete)
+        out = [self.format_completion(item) for item in completions]
+        return "\n".join(out)
+
+
+class BashComplete(ShellComplete):
+    """Shell completion for Bash."""
+
+    name = "bash"
+    source_template = _SOURCE_BASH
+
+    def _check_version(self) -> None:
+        import subprocess
+
+        output = subprocess.run(
+            ["bash", "-c", "echo ${BASH_VERSION}"], stdout=subprocess.PIPE
+        )
+        match = re.search(r"^(\d+)\.(\d+)\.\d+", output.stdout.decode())
+
+        if match is not None:
+            major, minor = match.groups()
+
+            if major < "4" or major == "4" and minor < "4":
+                raise RuntimeError(
+                    _(
+                        "Shell completion is not supported for Bash"
+                        " versions older than 4.4."
+                    )
+                )
+        else:
+            raise RuntimeError(
+                _("Couldn't detect Bash version, shell completion is not supported.")
+            )
+
+    def source(self) -> str:
+        self._check_version()
+        return super().source()
+
+    def get_completion_args(self) -> t.Tuple[t.List[str], str]:
+        cwords = split_arg_string(os.environ["COMP_WORDS"])
+        cword = int(os.environ["COMP_CWORD"])
+        args = cwords[1:cword]
+
+        try:
+            incomplete = cwords[cword]
+        except IndexError:
+            incomplete = ""
+
+        return args, incomplete
+
+    def format_completion(self, item: CompletionItem) -> str:
+        return f"{item.type},{item.value}"
+
+
+class ZshComplete(ShellComplete):
+    """Shell completion for Zsh."""
+
+    name = "zsh"
+    source_template = _SOURCE_ZSH
+
+    def get_completion_args(self) -> t.Tuple[t.List[str], str]:
+        cwords = split_arg_string(os.environ["COMP_WORDS"])
+        cword = int(os.environ["COMP_CWORD"])
+        args = cwords[1:cword]
+
+        try:
+            incomplete = cwords[cword]
+        except IndexError:
+            incomplete = ""
+
+        return args, incomplete
+
+    def format_completion(self, item: CompletionItem) -> str:
+        return f"{item.type}\n{item.value}\n{item.help if item.help else '_'}"
+
+
+class FishComplete(ShellComplete):
+    """Shell completion for Fish."""
+
+    name = "fish"
+    source_template = _SOURCE_FISH
+
+    def get_completion_args(self) -> t.Tuple[t.List[str], str]:
+        cwords = split_arg_string(os.environ["COMP_WORDS"])
+        incomplete = os.environ["COMP_CWORD"]
+        args = cwords[1:]
+
+        # Fish stores the partial word in both COMP_WORDS and
+        # COMP_CWORD, remove it from complete args.
+        if incomplete and args and args[-1] == incomplete:
+            args.pop()
+
+        return args, incomplete
+
+    def format_completion(self, item: CompletionItem) -> str:
+        if item.help:
+            return f"{item.type},{item.value}\t{item.help}"
+
+        return f"{item.type},{item.value}"
+
+
+_available_shells: t.Dict[str, t.Type[ShellComplete]] = {
+    "bash": BashComplete,
+    "fish": FishComplete,
+    "zsh": ZshComplete,
+}
+
+
+def add_completion_class(
+    cls: t.Type[ShellComplete], name: t.Optional[str] = None
+) -> None:
+    """Register a :class:`ShellComplete` subclass under the given name.
+    The name will be provided by the completion instruction environment
+    variable during completion.
+
+    :param cls: The completion class that will handle completion for the
+        shell.
+    :param name: Name to register the class under. Defaults to the
+        class's ``name`` attribute.
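+
+    Example (illustrative; ``MyShellComplete`` is hypothetical)::
+
+        class MyShellComplete(ShellComplete):
+            name = "myshell"
+            source_template = "..."  # completion script for that shell
+
+        add_completion_class(MyShellComplete)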
+    """
+    if name is None:
+        name = cls.name
+
+    _available_shells[name] = cls
+
+
+def get_completion_class(shell: str) -> t.Optional[t.Type[ShellComplete]]:
+    """Look up a registered :class:`ShellComplete` subclass by the name
+    provided by the completion instruction environment variable. If the
+    name isn't registered, returns ``None``.
+
+    :param shell: Name the class is registered under.
+    """
+    return _available_shells.get(shell)
+
+
+def _is_incomplete_argument(ctx: Context, param: Parameter) -> bool:
+    """Determine if the given parameter is an argument that can still
+    accept values.
+
+    :param ctx: Invocation context for the command represented by the
+        parsed complete args.
+    :param param: Argument object being checked.
+    """
+    if not isinstance(param, Argument):
+        return False
+
+    assert param.name is not None
+    value = ctx.params[param.name]
+    return (
+        param.nargs == -1
+        or ctx.get_parameter_source(param.name) is not ParameterSource.COMMANDLINE
+        or (
+            param.nargs > 1
+            and isinstance(value, (tuple, list))
+            and len(value) < param.nargs
+        )
+    )
+
+
+def _start_of_option(ctx: Context, value: str) -> bool:
+    """Check if the value looks like the start of an option."""
+    if not value:
+        return False
+
+    c = value[0]
+    return c in ctx._opt_prefixes
+
+
+def _is_incomplete_option(ctx: Context, args: t.List[str], param: Parameter) -> bool:
+    """Determine if the given parameter is an option that needs a value.
+
+    :param ctx: Invocation context for the command represented by the
+        parsed complete args.
+    :param args: List of complete args before the incomplete value.
+    :param param: Option object being checked.
+    """
+    if not isinstance(param, Option):
+        return False
+
+    if param.is_flag or param.count:
+        return False
+
+    last_option = None
+
+    for index, arg in enumerate(reversed(args)):
+        if index + 1 > param.nargs:
+            break
+
+        if _start_of_option(ctx, arg):
+            last_option = arg
+
+    return last_option is not None and last_option in param.opts
+
+
+def _resolve_context(
+    cli: BaseCommand, ctx_args: t.Dict[str, t.Any], prog_name: str, args: t.List[str]
+) -> Context:
+    """Produce the context hierarchy starting with the command and
+    traversing the complete arguments.  This only follows the commands;
+    it doesn't trigger input prompts or callbacks.
+
+    :param cli: Command being called.
+    :param ctx_args: Extra arguments to pass to ``cli.make_context``.
+    :param prog_name: Name of the executable in the shell.
+    :param args: List of complete args before the incomplete value.
+    """
+    ctx_args["resilient_parsing"] = True
+    ctx = cli.make_context(prog_name, args.copy(), **ctx_args)
+    args = ctx.protected_args + ctx.args
+
+    while args:
+        command = ctx.command
+
+        if isinstance(command, MultiCommand):
+            if not command.chain:
+                name, cmd, args = command.resolve_command(ctx, args)
+
+                if cmd is None:
+                    return ctx
+
+                ctx = cmd.make_context(name, args, parent=ctx, resilient_parsing=True)
+                args = ctx.protected_args + ctx.args
+            else:
+                while args:
+                    name, cmd, args = command.resolve_command(ctx, args)
+
+                    if cmd is None:
+                        return ctx
+
+                    sub_ctx = cmd.make_context(
+                        name,
+                        args,
+                        parent=ctx,
+                        allow_extra_args=True,
+                        allow_interspersed_args=False,
+                        resilient_parsing=True,
+                    )
+                    args = sub_ctx.args
+
+                ctx = sub_ctx
+                args = [*sub_ctx.protected_args, *sub_ctx.args]
+        else:
+            break
+
+    return ctx
+
+
+def _resolve_incomplete(
+    ctx: Context, args: t.List[str], incomplete: str
+) -> t.Tuple[t.Union[BaseCommand, Parameter], str]:
+    """Find the Click object that will handle the completion of the
+    incomplete value. Return the object and the incomplete value.
+
+    :param ctx: Invocation context for the command represented by
+        the parsed complete args.
+    :param args: List of complete args before the incomplete value.
+    :param incomplete: Value being completed. May be empty.
+    """
+    # Different shells treat an "=" between a long option name and
+    # value differently. Might keep the value joined, return the "="
+    # as a separate item, or return the split name and value. Always
+    # split and discard the "=" to make completion easier.
+    if incomplete == "=":
+        incomplete = ""
+    elif "=" in incomplete and _start_of_option(ctx, incomplete):
+        name, _, incomplete = incomplete.partition("=")
+        args.append(name)
+
+    # The "--" marker tells Click to stop treating values as options
+    # even if they start with the option character. If it hasn't been
+    # given and the incomplete arg looks like an option, the current
+    # command will provide option name completions.
+    if "--" not in args and _start_of_option(ctx, incomplete):
+        return ctx.command, incomplete
+
+    params = ctx.command.get_params(ctx)
+
+    # If the last complete arg is an option name with an incomplete
+    # value, the option will provide value completions.
+    for param in params:
+        if _is_incomplete_option(ctx, args, param):
+            return param, incomplete
+
+    # It's not an option name or value. The first argument without a
+    # parsed value will provide value completions.
+    for param in params:
+        if _is_incomplete_argument(ctx, param):
+            return param, incomplete
+
+    # There were no unparsed arguments, the command may be a group that
+    # will provide command name completions.
+    return ctx.command, incomplete
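
Reviewer note on the module above: ``_resolve_context`` and
``_resolve_incomplete`` are the heart of Click's completion flow. A minimal
sketch of how they cooperate, assuming a toy ``cli`` group with a ``hello``
command (both illustrative; these helpers are private, so the sketch leans
on Click internals):

    import click
    from click.shell_completion import _resolve_context, _resolve_incomplete

    @click.group()
    def cli():
        pass

    @cli.command()
    @click.option("--name")
    def hello(name):
        click.echo(f"Hello {name}")

    # Re-parse the complete args without triggering callbacks, then find
    # the object that should supply completions for the incomplete text.
    ctx = _resolve_context(cli, {}, "cli", ["hello"])
    obj, incomplete = _resolve_incomplete(ctx, ["hello"], "--na")
    # "--na" looks like an option name, so ``obj`` is the ``hello``
    # command, which will go on to complete its option names.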
diff --git a/venv/lib/python3.9/site-packages/click/termui.py b/venv/lib/python3.9/site-packages/click/termui.py
new file mode 100644
index 0000000..bfb2f5a
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click/termui.py
@@ -0,0 +1,787 @@
+import inspect
+import io
+import itertools
+import os
+import sys
+import typing as t
+from gettext import gettext as _
+
+from ._compat import isatty
+from ._compat import strip_ansi
+from ._compat import WIN
+from .exceptions import Abort
+from .exceptions import UsageError
+from .globals import resolve_color_default
+from .types import Choice
+from .types import convert_type
+from .types import ParamType
+from .utils import echo
+from .utils import LazyFile
+
+if t.TYPE_CHECKING:
+    from ._termui_impl import ProgressBar
+
+V = t.TypeVar("V")
+
+# The prompt functions to use.  The doc tools currently override these
+# functions to customize how they work.
+visible_prompt_func: t.Callable[[str], str] = input
+
+_ansi_colors = {
+    "black": 30,
+    "red": 31,
+    "green": 32,
+    "yellow": 33,
+    "blue": 34,
+    "magenta": 35,
+    "cyan": 36,
+    "white": 37,
+    "reset": 39,
+    "bright_black": 90,
+    "bright_red": 91,
+    "bright_green": 92,
+    "bright_yellow": 93,
+    "bright_blue": 94,
+    "bright_magenta": 95,
+    "bright_cyan": 96,
+    "bright_white": 97,
+}
+_ansi_reset_all = "\033[0m"
+
+
+def hidden_prompt_func(prompt: str) -> str:
+    import getpass
+
+    return getpass.getpass(prompt)
+
+
+def _build_prompt(
+    text: str,
+    suffix: str,
+    show_default: bool = False,
+    default: t.Optional[t.Any] = None,
+    show_choices: bool = True,
+    type: t.Optional[ParamType] = None,
+) -> str:
+    prompt = text
+    if type is not None and show_choices and isinstance(type, Choice):
+        prompt += f" ({', '.join(map(str, type.choices))})"
+    if default is not None and show_default:
+        prompt = f"{prompt} [{_format_default(default)}]"
+    return f"{prompt}{suffix}"
+
+
+def _format_default(default: t.Any) -> t.Any:
+    if isinstance(default, (io.IOBase, LazyFile)) and hasattr(default, "name"):
+        return default.name  # type: ignore
+
+    return default
+
+
+def prompt(
+    text: str,
+    default: t.Optional[t.Any] = None,
+    hide_input: bool = False,
+    confirmation_prompt: t.Union[bool, str] = False,
+    type: t.Optional[t.Union[ParamType, t.Any]] = None,
+    value_proc: t.Optional[t.Callable[[str], t.Any]] = None,
+    prompt_suffix: str = ": ",
+    show_default: bool = True,
+    err: bool = False,
+    show_choices: bool = True,
+) -> t.Any:
+    """Prompts a user for input.  This is a convenience function that can
+    be used to prompt a user for input later.
+
+    If the user aborts the input by sending an interrupt signal, this
+    function will catch it and raise a :exc:`Abort` exception.
+
+    :param text: the text to show for the prompt.
+    :param default: the default value to use if no input happens.  If this
+                    is not given, it will prompt until it's aborted.
+    :param hide_input: if this is set to true then the input value will
+                       be hidden.
+    :param confirmation_prompt: Prompt a second time to confirm the
+        value. Can be set to a string instead of ``True`` to customize
+        the message.
+    :param type: the type to use to check the value against.
+    :param value_proc: if this parameter is provided it's a function that
+                       is invoked instead of the type conversion to
+                       convert a value.
+    :param prompt_suffix: a suffix that should be added to the prompt.
+    :param show_default: shows or hides the default value in the prompt.
+    :param err: if set to true the file defaults to ``stderr`` instead of
+                ``stdout``, the same as with echo.
+    :param show_choices: Show or hide choices if the passed type is a Choice.
+                         For example if type is a Choice of either day or week,
+                         show_choices is true and text is "Group by" then the
+                         prompt will be "Group by (day, week): ".
+
+    .. versionadded:: 8.0
+        ``confirmation_prompt`` can be a custom string.
+
+    .. versionadded:: 7.0
+        Added the ``show_choices`` parameter.
+
+    .. versionadded:: 6.0
+        Added unicode support for cmd.exe on Windows.
+
+    .. versionadded:: 4.0
+        Added the `err` parameter.
+
+    """
+
+    def prompt_func(text: str) -> str:
+        f = hidden_prompt_func if hide_input else visible_prompt_func
+        try:
+            # Write the prompt separately so that we get nice
+            # coloring through colorama on Windows
+            echo(text.rstrip(" "), nl=False, err=err)
+            # Echo a space to stdout to work around an issue where
+            # readline causes backspace to clear the whole line.
+            return f(" ")
+        except (KeyboardInterrupt, EOFError):
+            # getpass doesn't print a newline if the user aborts input with ^C.
+            # Allegedly this behavior is inherited from getpass(3).
+            # A doc bug has been filed at https://bugs.python.org/issue24711
+            if hide_input:
+                echo(None, err=err)
+            raise Abort() from None
+
+    if value_proc is None:
+        value_proc = convert_type(type, default)
+
+    prompt = _build_prompt(
+        text, prompt_suffix, show_default, default, show_choices, type
+    )
+
+    if confirmation_prompt:
+        if confirmation_prompt is True:
+            confirmation_prompt = _("Repeat for confirmation")
+
+        confirmation_prompt = _build_prompt(confirmation_prompt, prompt_suffix)
+
+    while True:
+        while True:
+            value = prompt_func(prompt)
+            if value:
+                break
+            elif default is not None:
+                value = default
+                break
+        try:
+            result = value_proc(value)
+        except UsageError as e:
+            if hide_input:
+                echo(_("Error: The value you entered was invalid."), err=err)
+            else:
+                echo(_("Error: {e.message}").format(e=e), err=err)  # noqa: B306
+            continue
+        if not confirmation_prompt:
+            return result
+        while True:
+            value2 = prompt_func(confirmation_prompt)
+            is_empty = not value and not value2
+            if value2 or is_empty:
+                break
+        if value == value2:
+            return result
+        echo(_("Error: The two entered values do not match."), err=err)
+
+
+def confirm(
+    text: str,
+    default: t.Optional[bool] = False,
+    abort: bool = False,
+    prompt_suffix: str = ": ",
+    show_default: bool = True,
+    err: bool = False,
+) -> bool:
+    """Prompts for confirmation (yes/no question).
+
+    If the user aborts the input by sending an interrupt signal, this
+    function will catch it and raise a :exc:`Abort` exception.
+
+    :param text: the question to ask.
+    :param default: The default value to use when no input is given. If
+        ``None``, repeat until input is given.
+    :param abort: if this is set to `True` a negative answer aborts the
+                  execution by raising :exc:`Abort`.
+    :param prompt_suffix: a suffix that should be added to the prompt.
+    :param show_default: shows or hides the default value in the prompt.
+    :param err: if set to true the file defaults to ``stderr`` instead of
+                ``stdout``, the same as with echo.
+
+    .. versionchanged:: 8.0
+        Repeat until input is given if ``default`` is ``None``.
+
+    .. versionadded:: 4.0
+        Added the ``err`` parameter.
+    """
+    prompt = _build_prompt(
+        text,
+        prompt_suffix,
+        show_default,
+        "y/n" if default is None else ("Y/n" if default else "y/N"),
+    )
+
+    while True:
+        try:
+            # Write the prompt separately so that we get nice
+            # coloring through colorama on Windows
+            echo(prompt.rstrip(" "), nl=False, err=err)
+            # Echo a space to stdout to work around an issue where
+            # readline causes backspace to clear the whole line.
+            value = visible_prompt_func(" ").lower().strip()
+        except (KeyboardInterrupt, EOFError):
+            raise Abort() from None
+        if value in ("y", "yes"):
+            rv = True
+        elif value in ("n", "no"):
+            rv = False
+        elif default is not None and value == "":
+            rv = default
+        else:
+            echo(_("Error: invalid input"), err=err)
+            continue
+        break
+    if abort and not rv:
+        raise Abort()
+    return rv
+
+
+def echo_via_pager(
+    text_or_generator: t.Union[t.Iterable[str], t.Callable[[], t.Iterable[str]], str],
+    color: t.Optional[bool] = None,
+) -> None:
+    """This function takes a text and shows it via an environment specific
+    pager on stdout.
+
+    .. versionchanged:: 3.0
+       Added the `color` flag.
+
+    :param text_or_generator: the text to page, or alternatively, a
+                              generator emitting the text to page.
+    :param color: controls if the pager supports ANSI colors or not.  The
+                  default is autodetection.
+    """
+    color = resolve_color_default(color)
+
+    if inspect.isgeneratorfunction(text_or_generator):
+        i = t.cast(t.Callable[[], t.Iterable[str]], text_or_generator)()
+    elif isinstance(text_or_generator, str):
+        i = [text_or_generator]
+    else:
+        i = iter(t.cast(t.Iterable[str], text_or_generator))
+
+    # convert every element of i to a text type if necessary
+    text_generator = (el if isinstance(el, str) else str(el) for el in i)
+
+    from ._termui_impl import pager
+
+    return pager(itertools.chain(text_generator, "\n"), color)
+
+
+def progressbar(
+    iterable: t.Optional[t.Iterable[V]] = None,
+    length: t.Optional[int] = None,
+    label: t.Optional[str] = None,
+    show_eta: bool = True,
+    show_percent: t.Optional[bool] = None,
+    show_pos: bool = False,
+    item_show_func: t.Optional[t.Callable[[t.Optional[V]], t.Optional[str]]] = None,
+    fill_char: str = "#",
+    empty_char: str = "-",
+    bar_template: str = "%(label)s  [%(bar)s]  %(info)s",
+    info_sep: str = "  ",
+    width: int = 36,
+    file: t.Optional[t.TextIO] = None,
+    color: t.Optional[bool] = None,
+    update_min_steps: int = 1,
+) -> "ProgressBar[V]":
+    """This function creates an iterable context manager that can be used
+    to iterate over something while showing a progress bar.  It will
+    either iterate over the `iterable` or `length` items (that are counted
+    up).  While iteration happens, this function will print a rendered
+    progress bar to the given `file` (defaults to stdout) and will attempt
+    to calculate remaining time and more.  By default, this progress bar
+    will not be rendered if the file is not a terminal.
+
+    The context manager creates the progress bar.  When the context
+    manager is entered the progress bar is already created.  With every
+    iteration over the progress bar, the iterable passed to the bar is
+    advanced and the bar is updated.  When the context manager exits,
+    a newline is printed and the progress bar is finalized on screen.
+
+    Note: The progress bar is currently designed for use cases where the
+    total progress can be expected to take at least several seconds.
+    Because of this, the ProgressBar class won't display updates that
+    arrive too quickly, such as steps that complete less than a second
+    apart.
+
+    No other printing must happen while the bar is active, or the
+    progress bar will be unintentionally destroyed.
+
+    Example usage::
+
+        with progressbar(items) as bar:
+            for item in bar:
+                do_something_with(item)
+
+    Alternatively, if no iterable is specified, one can manually update the
+    progress bar through the `update()` method instead of directly
+    iterating over the progress bar.  The update method accepts the number
+    of steps to increment the bar with::
+
+        with progressbar(length=chunks.total_bytes) as bar:
+            for chunk in chunks:
+                process_chunk(chunk)
+                bar.update(chunks.bytes)
+
+    The ``update()`` method also takes an optional value specifying the
+    ``current_item`` at the new position. This is useful when used
+    together with ``item_show_func`` to customize the output for each
+    manual step::
+
+        with click.progressbar(
+            length=total_size,
+            label='Unzipping archive',
+            item_show_func=lambda a: a.filename
+        ) as bar:
+            for archive in zip_file:
+                archive.extract()
+                bar.update(archive.size, archive)
+
+    :param iterable: an iterable to iterate over.  If not provided the length
+                     is required.
+    :param length: the number of items to iterate over.  By default the
+                   progressbar will attempt to ask the iterator about its
+                   length, which might or might not work.  If an iterable is
+                   also provided this parameter can be used to override the
+                   length.  If an iterable is not provided the progress bar
+                   will iterate over a range of that length.
+    :param label: the label to show next to the progress bar.
+    :param show_eta: enables or disables the estimated time display.  This is
+                     automatically disabled if the length cannot be
+                     determined.
+    :param show_percent: enables or disables the percentage display.  The
+                         default is `True` if the iterable has a length or
+                         `False` if not.
+    :param show_pos: enables or disables the absolute position display.  The
+                     default is `False`.
+    :param item_show_func: A function called with the current item which
+        can return a string to show next to the progress bar. If the
+        function returns ``None`` nothing is shown. The current item can
+        be ``None``, such as when entering and exiting the bar.
+    :param fill_char: the character to use to show the filled part of the
+                      progress bar.
+    :param empty_char: the character to use to show the non-filled part of
+                       the progress bar.
+    :param bar_template: the format string to use as template for the bar.
+                         The parameters in it are ``label`` for the label,
+                         ``bar`` for the progress bar and ``info`` for the
+                         info section.
+    :param info_sep: the separator between multiple info items (eta etc.)
+    :param width: the width of the progress bar in characters, 0 means full
+                  terminal width
+    :param file: The file to write to. If this is not a terminal then
+        only the label is printed.
+    :param color: controls if the terminal supports ANSI colors or not.  The
+                  default is autodetection.  This is only needed if ANSI
+                  codes are included anywhere in the progress bar output
+                  which is not the case by default.
+    :param update_min_steps: Render only when this many updates have
+        completed. This allows tuning for very fast iterators.
+
+    .. versionchanged:: 8.0
+        Output is shown even if execution time is less than 0.5 seconds.
+
+    .. versionchanged:: 8.0
+        ``item_show_func`` shows the current item, not the previous one.
+
+    .. versionchanged:: 8.0
+        Labels are echoed if the output is not a TTY. Reverts a change
+        in 7.0 that removed all output.
+
+    .. versionadded:: 8.0
+       Added the ``update_min_steps`` parameter.
+
+    .. versionchanged:: 4.0
+        Added the ``color`` parameter. Added the ``update`` method to
+        the object.
+
+    .. versionadded:: 2.0
+    """
+    from ._termui_impl import ProgressBar
+
+    color = resolve_color_default(color)
+    return ProgressBar(
+        iterable=iterable,
+        length=length,
+        show_eta=show_eta,
+        show_percent=show_percent,
+        show_pos=show_pos,
+        item_show_func=item_show_func,
+        fill_char=fill_char,
+        empty_char=empty_char,
+        bar_template=bar_template,
+        info_sep=info_sep,
+        file=file,
+        label=label,
+        width=width,
+        color=color,
+        update_min_steps=update_min_steps,
+    )
+
+
+def clear() -> None:
+    """Clears the terminal screen.  This will have the effect of clearing
+    the whole visible space of the terminal and moving the cursor to the
+    top left.  This does not do anything if not connected to a terminal.
+
+    .. versionadded:: 2.0
+    """
+    if not isatty(sys.stdout):
+        return
+    if WIN:
+        os.system("cls")
+    else:
+        sys.stdout.write("\033[2J\033[1;1H")
+
+
+def _interpret_color(
+    color: t.Union[int, t.Tuple[int, int, int], str], offset: int = 0
+) -> str:
+    if isinstance(color, int):
+        return f"{38 + offset};5;{color:d}"
+
+    if isinstance(color, (tuple, list)):
+        r, g, b = color
+        return f"{38 + offset};2;{r:d};{g:d};{b:d}"
+
+    return str(_ansi_colors[color] + offset)
+
+
+def style(
+    text: t.Any,
+    fg: t.Optional[t.Union[int, t.Tuple[int, int, int], str]] = None,
+    bg: t.Optional[t.Union[int, t.Tuple[int, int, int], str]] = None,
+    bold: t.Optional[bool] = None,
+    dim: t.Optional[bool] = None,
+    underline: t.Optional[bool] = None,
+    overline: t.Optional[bool] = None,
+    italic: t.Optional[bool] = None,
+    blink: t.Optional[bool] = None,
+    reverse: t.Optional[bool] = None,
+    strikethrough: t.Optional[bool] = None,
+    reset: bool = True,
+) -> str:
+    """Styles a text with ANSI styles and returns the new string.  By
+    default the styling is self contained which means that at the end
+    of the string a reset code is issued.  This can be prevented by
+    passing ``reset=False``.
+
+    Examples::
+
+        click.echo(click.style('Hello World!', fg='green'))
+        click.echo(click.style('ATTENTION!', blink=True))
+        click.echo(click.style('Some things', reverse=True, fg='cyan'))
+        click.echo(click.style('More colors', fg=(255, 12, 128), bg=117))
+
+    Supported color names:
+
+    * ``black`` (might be a gray)
+    * ``red``
+    * ``green``
+    * ``yellow`` (might be an orange)
+    * ``blue``
+    * ``magenta``
+    * ``cyan``
+    * ``white`` (might be light gray)
+    * ``bright_black``
+    * ``bright_red``
+    * ``bright_green``
+    * ``bright_yellow``
+    * ``bright_blue``
+    * ``bright_magenta``
+    * ``bright_cyan``
+    * ``bright_white``
+    * ``reset`` (reset the color code only)
+
+    If the terminal supports it, color may also be specified as:
+
+    -   An integer in the interval [0, 255]. The terminal must support
+        8-bit/256-color mode.
+    -   An RGB tuple of three integers in [0, 255]. The terminal must
+        support 24-bit/true-color mode.
+
+    See https://en.wikipedia.org/wiki/ANSI_color and
+    https://gist.github.com/XVilka/8346728 for more information.
+
+    :param text: the string to style with ansi codes.
+    :param fg: if provided this will become the foreground color.
+    :param bg: if provided this will become the background color.
+    :param bold: if provided this will enable or disable bold mode.
+    :param dim: if provided this will enable or disable dim mode.  This is
+                badly supported.
+    :param underline: if provided this will enable or disable underline.
+    :param overline: if provided this will enable or disable overline.
+    :param italic: if provided this will enable or disable italic.
+    :param blink: if provided this will enable or disable blinking.
+    :param reverse: if provided this will enable or disable inverse
+                    rendering (foreground becomes background and the
+                    other way round).
+    :param strikethrough: if provided this will enable or disable
+        striking through text.
+    :param reset: by default a reset-all code is added at the end of the
+                  string which means that styles do not carry over.  This
+                  can be disabled to compose styles.
+
+    .. versionchanged:: 8.0
+        A non-string ``message`` is converted to a string.
+
+    .. versionchanged:: 8.0
+       Added support for 256 and RGB color codes.
+
+    .. versionchanged:: 8.0
+        Added the ``strikethrough``, ``italic``, and ``overline``
+        parameters.
+
+    .. versionchanged:: 7.0
+        Added support for bright colors.
+
+    .. versionadded:: 2.0
+    """
+    if not isinstance(text, str):
+        text = str(text)
+
+    bits = []
+
+    if fg:
+        try:
+            bits.append(f"\033[{_interpret_color(fg)}m")
+        except KeyError:
+            raise TypeError(f"Unknown color {fg!r}") from None
+
+    if bg:
+        try:
+            bits.append(f"\033[{_interpret_color(bg, 10)}m")
+        except KeyError:
+            raise TypeError(f"Unknown color {bg!r}") from None
+
+    if bold is not None:
+        bits.append(f"\033[{1 if bold else 22}m")
+    if dim is not None:
+        bits.append(f"\033[{2 if dim else 22}m")
+    if underline is not None:
+        bits.append(f"\033[{4 if underline else 24}m")
+    if overline is not None:
+        bits.append(f"\033[{53 if overline else 55}m")
+    if italic is not None:
+        bits.append(f"\033[{3 if italic else 23}m")
+    if blink is not None:
+        bits.append(f"\033[{5 if blink else 25}m")
+    if reverse is not None:
+        bits.append(f"\033[{7 if reverse else 27}m")
+    if strikethrough is not None:
+        bits.append(f"\033[{9 if strikethrough else 29}m")
+    bits.append(text)
+    if reset:
+        bits.append(_ansi_reset_all)
+    return "".join(bits)
+
+
+def unstyle(text: str) -> str:
+    """Removes ANSI styling information from a string.  Usually it's not
+    necessary to use this function as Click's echo function will
+    automatically remove styling if necessary.
+
+    .. versionadded:: 2.0
+
+    :param text: the text to remove style information from.
+    """
+    return strip_ansi(text)
+
+
+def secho(
+    message: t.Optional[t.Any] = None,
+    file: t.Optional[t.IO[t.AnyStr]] = None,
+    nl: bool = True,
+    err: bool = False,
+    color: t.Optional[bool] = None,
+    **styles: t.Any,
+) -> None:
+    """This function combines :func:`echo` and :func:`style` into one
+    call.  As such the following two calls are the same::
+
+        click.secho('Hello World!', fg='green')
+        click.echo(click.style('Hello World!', fg='green'))
+
+    All keyword arguments are forwarded to the underlying functions
+    depending on which one they go with.
+
+    Non-string types will be converted to :class:`str`. However,
+    :class:`bytes` are passed directly to :meth:`echo` without applying
+    style. If you want to style bytes that represent text, call
+    :meth:`bytes.decode` first.
+
+    .. versionchanged:: 8.0
+        A non-string ``message`` is converted to a string. Bytes are
+        passed through without style applied.
+
+    .. versionadded:: 2.0
+    """
+    if message is not None and not isinstance(message, (bytes, bytearray)):
+        message = style(message, **styles)
+
+    return echo(message, file=file, nl=nl, err=err, color=color)
+
+
+def edit(
+    text: t.Optional[t.AnyStr] = None,
+    editor: t.Optional[str] = None,
+    env: t.Optional[t.Mapping[str, str]] = None,
+    require_save: bool = True,
+    extension: str = ".txt",
+    filename: t.Optional[str] = None,
+) -> t.Optional[t.AnyStr]:
+    r"""Edits the given text in the defined editor.  If an editor is given
+    (should be the full path to the executable but the regular operating
+    system search path is used for finding the executable) it overrides
+    the detected editor.  Optionally, some environment variables can be
+    used.  If the editor is closed without changes, `None` is returned.  In
+    case a file is edited directly the return value is always `None` and
+    `require_save` and `extension` are ignored.
+
+    If the editor cannot be opened a :exc:`UsageError` is raised.
+
+    Note for Windows: to simplify cross-platform usage, the newlines are
+    automatically converted from POSIX to Windows and vice versa.  As such,
+    the message here will have ``\n`` as newline markers.
+
+    :param text: the text to edit.
+    :param editor: optionally the editor to use.  Defaults to automatic
+                   detection.
+    :param env: environment variables to forward to the editor.
+    :param require_save: if this is true, then not saving in the editor
+                         will make the return value become `None`.
+    :param extension: the extension to tell the editor about.  This defaults
+                      to `.txt` but changing this might change syntax
+                      highlighting.
+    :param filename: if provided it will edit this file instead of the
+                     provided text contents.  It will not use a temporary
+                     file as an indirection in that case.
+    """
+    from ._termui_impl import Editor
+
+    ed = Editor(editor=editor, env=env, require_save=require_save, extension=extension)
+
+    if filename is None:
+        return ed.edit(text)
+
+    ed.edit_file(filename)
+    return None
+
+
+def launch(url: str, wait: bool = False, locate: bool = False) -> int:
+    """This function launches the given URL (or filename) in the default
+    viewer application for this file type.  If this is an executable, it
+    might launch the executable in a new session.  The return value is
+    the exit code of the launched application.  Usually, ``0`` indicates
+    success.
+
+    Examples::
+
+        click.launch('https://click.palletsprojects.com/')
+        click.launch('/my/downloaded/file', locate=True)
+
+    .. versionadded:: 2.0
+
+    :param url: URL or filename of the thing to launch.
+    :param wait: Wait for the program to exit before returning. This
+        only works if the launched program blocks. In particular,
+        ``xdg-open`` on Linux does not block.
+    :param locate: if this is set to `True` then instead of launching the
+                   application associated with the URL it will attempt to
+                   launch a file manager with the file located.  This
+                   might have weird effects if the URL does not point to
+                   the filesystem.
+    """
+    from ._termui_impl import open_url
+
+    return open_url(url, wait=wait, locate=locate)
+
+
+# If this is provided, getchar() calls into this instead.  This is used
+# for unittesting purposes.
+_getchar: t.Optional[t.Callable[[bool], str]] = None
+
+
+def getchar(echo: bool = False) -> str:
+    """Fetches a single character from the terminal and returns it.  This
+    will always return a unicode character and under certain rare
+    circumstances this might return more than one character.  The
+    situations which more than one character is returned is when for
+    whatever reason multiple characters end up in the terminal buffer or
+    standard input was not actually a terminal.
+
+    Note that this will always read from the terminal, even if something
+    is piped into the standard input.
+
+    Note for Windows: in rare cases when typing non-ASCII characters, this
+    function might wait for a second character and then return both at once.
+    This is because certain Unicode characters look like special-key markers.
+
+    .. versionadded:: 2.0
+
+    :param echo: if set to `True`, the character read will also show up on
+                 the terminal.  The default is to not show it.
+    """
+    global _getchar
+
+    if _getchar is None:
+        from ._termui_impl import getchar as f
+
+        _getchar = f
+
+    return _getchar(echo)
+
+
+def raw_terminal() -> t.ContextManager[int]:
+    from ._termui_impl import raw_terminal as f
+
+    return f()
+
+
+def pause(info: t.Optional[str] = None, err: bool = False) -> None:
+    """This command stops execution and waits for the user to press any
+    key to continue.  This is similar to the Windows batch "pause"
+    command.  If the program is not run through a terminal, this command
+    will instead do nothing.
+
+    .. versionadded:: 2.0
+
+    .. versionadded:: 4.0
+       Added the `err` parameter.
+
+    :param info: The message to print before pausing. Defaults to
+        ``"Press any key to continue..."``.
+    :param err: if set to true the message goes to ``stderr`` instead of
+                ``stdout``, the same as with echo.
+    """
+    if not isatty(sys.stdin) or not isatty(sys.stdout):
+        return
+
+    if info is None:
+        info = _("Press any key to continue...")
+
+    try:
+        if info:
+            echo(info, nl=False, err=err)
+        try:
+            getchar()
+        except (KeyboardInterrupt, EOFError):
+            pass
+    finally:
+        if info:
+            echo(err=err)
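
Reviewer note: a quick smoke test touching the main entry points of the
termui module above (``prompt``, ``confirm``, ``progressbar``, ``secho``).
The prompt text, default value, and step count are made up for illustration:

    import time

    import click

    target = click.prompt("Deploy target", default="sky130A")
    if click.confirm("Proceed?", default=True):
        with click.progressbar(range(5), label="Working") as bar:
            for _ in bar:
                time.sleep(0.2)  # stand-in for real work
        click.secho(f"Finished for {target}", fg="green", bold=True)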
diff --git a/venv/lib/python3.9/site-packages/click/testing.py b/venv/lib/python3.9/site-packages/click/testing.py
new file mode 100644
index 0000000..e395c2e
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click/testing.py
@@ -0,0 +1,479 @@
+import contextlib
+import io
+import os
+import shlex
+import shutil
+import sys
+import tempfile
+import typing as t
+from types import TracebackType
+
+from . import formatting
+from . import termui
+from . import utils
+from ._compat import _find_binary_reader
+
+if t.TYPE_CHECKING:
+    from .core import BaseCommand
+
+
+class EchoingStdin:
+    def __init__(self, input: t.BinaryIO, output: t.BinaryIO) -> None:
+        self._input = input
+        self._output = output
+        self._paused = False
+
+    def __getattr__(self, x: str) -> t.Any:
+        return getattr(self._input, x)
+
+    def _echo(self, rv: bytes) -> bytes:
+        if not self._paused:
+            self._output.write(rv)
+
+        return rv
+
+    def read(self, n: int = -1) -> bytes:
+        return self._echo(self._input.read(n))
+
+    def read1(self, n: int = -1) -> bytes:
+        return self._echo(self._input.read1(n))  # type: ignore
+
+    def readline(self, n: int = -1) -> bytes:
+        return self._echo(self._input.readline(n))
+
+    def readlines(self) -> t.List[bytes]:
+        return [self._echo(x) for x in self._input.readlines()]
+
+    def __iter__(self) -> t.Iterator[bytes]:
+        return iter(self._echo(x) for x in self._input)
+
+    def __repr__(self) -> str:
+        return repr(self._input)
+
+
+@contextlib.contextmanager
+def _pause_echo(stream: t.Optional[EchoingStdin]) -> t.Iterator[None]:
+    if stream is None:
+        yield
+    else:
+        stream._paused = True
+        yield
+        stream._paused = False
+
+
+class _NamedTextIOWrapper(io.TextIOWrapper):
+    def __init__(
+        self, buffer: t.BinaryIO, name: str, mode: str, **kwargs: t.Any
+    ) -> None:
+        super().__init__(buffer, **kwargs)
+        self._name = name
+        self._mode = mode
+
+    @property
+    def name(self) -> str:
+        return self._name
+
+    @property
+    def mode(self) -> str:
+        return self._mode
+
+
+def make_input_stream(
+    input: t.Optional[t.Union[str, bytes, t.IO]], charset: str
+) -> t.BinaryIO:
+    # Is already an input stream.
+    if hasattr(input, "read"):
+        rv = _find_binary_reader(t.cast(t.IO, input))
+
+        if rv is not None:
+            return rv
+
+        raise TypeError("Could not find binary reader for input stream.")
+
+    if input is None:
+        input = b""
+    elif isinstance(input, str):
+        input = input.encode(charset)
+
+    return io.BytesIO(t.cast(bytes, input))
+
+
+class Result:
+    """Holds the captured result of an invoked CLI script."""
+
+    def __init__(
+        self,
+        runner: "CliRunner",
+        stdout_bytes: bytes,
+        stderr_bytes: t.Optional[bytes],
+        return_value: t.Any,
+        exit_code: int,
+        exception: t.Optional[BaseException],
+        exc_info: t.Optional[
+            t.Tuple[t.Type[BaseException], BaseException, TracebackType]
+        ] = None,
+    ):
+        #: The runner that created the result
+        self.runner = runner
+        #: The standard output as bytes.
+        self.stdout_bytes = stdout_bytes
+        #: The standard error as bytes, or None if not available.
+        self.stderr_bytes = stderr_bytes
+        #: The value returned from the invoked command.
+        #:
+        #: .. versionadded:: 8.0
+        self.return_value = return_value
+        #: The exit code as integer.
+        self.exit_code = exit_code
+        #: The exception that happened if one did.
+        self.exception = exception
+        #: The traceback, if available.
+        self.exc_info = exc_info
+
+    @property
+    def output(self) -> str:
+        """The (standard) output as unicode string."""
+        return self.stdout
+
+    @property
+    def stdout(self) -> str:
+        """The standard output as unicode string."""
+        return self.stdout_bytes.decode(self.runner.charset, "replace").replace(
+            "\r\n", "\n"
+        )
+
+    @property
+    def stderr(self) -> str:
+        """The standard error as unicode string."""
+        if self.stderr_bytes is None:
+            raise ValueError("stderr not separately captured")
+        return self.stderr_bytes.decode(self.runner.charset, "replace").replace(
+            "\r\n", "\n"
+        )
+
+    def __repr__(self) -> str:
+        exc_str = repr(self.exception) if self.exception else "okay"
+        return f"<{type(self).__name__} {exc_str}>"
+
+
+class CliRunner:
+    """The CLI runner provides functionality to invoke a Click command line
+    script for unittesting purposes in a isolated environment.  This only
+    works in single-threaded systems without any concurrency as it changes the
+    global interpreter state.
+
+    :param charset: the character set for the input and output data.
+    :param env: a dictionary with environment variables for overriding.
+    :param echo_stdin: if this is set to `True`, then reading from stdin writes
+                       to stdout.  This is useful for showing examples in
+                       some circumstances.  Note that regular prompts
+                       will automatically echo the input.
+    :param mix_stderr: if this is set to `False`, then stdout and stderr are
+                       preserved as independent streams.  This is useful for
+                       Unix-philosophy apps that have predictable stdout and
+                       noisy stderr, such that each may be measured
+                       independently
+    """
+
+    def __init__(
+        self,
+        charset: str = "utf-8",
+        env: t.Optional[t.Mapping[str, t.Optional[str]]] = None,
+        echo_stdin: bool = False,
+        mix_stderr: bool = True,
+    ) -> None:
+        self.charset = charset
+        self.env = env or {}
+        self.echo_stdin = echo_stdin
+        self.mix_stderr = mix_stderr
+
+    def get_default_prog_name(self, cli: "BaseCommand") -> str:
+        """Given a command object it will return the default program name
+        for it.  The default is the `name` attribute or ``"root"`` if not
+        set.
+        """
+        return cli.name or "root"
+
+    def make_env(
+        self, overrides: t.Optional[t.Mapping[str, t.Optional[str]]] = None
+    ) -> t.Mapping[str, t.Optional[str]]:
+        """Returns the environment overrides for invoking a script."""
+        rv = dict(self.env)
+        if overrides:
+            rv.update(overrides)
+        return rv
+
+    @contextlib.contextmanager
+    def isolation(
+        self,
+        input: t.Optional[t.Union[str, bytes, t.IO]] = None,
+        env: t.Optional[t.Mapping[str, t.Optional[str]]] = None,
+        color: bool = False,
+    ) -> t.Iterator[t.Tuple[io.BytesIO, t.Optional[io.BytesIO]]]:
+        """A context manager that sets up the isolation for invoking of a
+        command line tool.  This sets up stdin with the given input data
+        and `os.environ` with the overrides from the given dictionary.
+        This also rebinds some internals in Click to be mocked (like the
+        prompt functionality).
+
+        This is automatically done in the :meth:`invoke` method.
+
+        :param input: the input stream to put into sys.stdin.
+        :param env: the environment overrides as dictionary.
+        :param color: whether the output should contain color codes. The
+                      application can still override this explicitly.
+
+        .. versionchanged:: 8.0
+            ``stderr`` is opened with ``errors="backslashreplace"``
+            instead of the default ``"strict"``.
+
+        .. versionchanged:: 4.0
+            Added the ``color`` parameter.
+        """
+        bytes_input = make_input_stream(input, self.charset)
+        echo_input = None
+
+        old_stdin = sys.stdin
+        old_stdout = sys.stdout
+        old_stderr = sys.stderr
+        old_forced_width = formatting.FORCED_WIDTH
+        formatting.FORCED_WIDTH = 80
+
+        env = self.make_env(env)
+
+        bytes_output = io.BytesIO()
+
+        if self.echo_stdin:
+            bytes_input = echo_input = t.cast(
+                t.BinaryIO, EchoingStdin(bytes_input, bytes_output)
+            )
+
+        sys.stdin = text_input = _NamedTextIOWrapper(
+            bytes_input, encoding=self.charset, name="<stdin>", mode="r"
+        )
+
+        if self.echo_stdin:
+            # Force unbuffered reads, otherwise TextIOWrapper reads a
+            # large chunk which is echoed early.
+            text_input._CHUNK_SIZE = 1  # type: ignore
+
+        sys.stdout = _NamedTextIOWrapper(
+            bytes_output, encoding=self.charset, name="<stdout>", mode="w"
+        )
+
+        bytes_error = None
+        if self.mix_stderr:
+            sys.stderr = sys.stdout
+        else:
+            bytes_error = io.BytesIO()
+            sys.stderr = _NamedTextIOWrapper(
+                bytes_error,
+                encoding=self.charset,
+                name="<stderr>",
+                mode="w",
+                errors="backslashreplace",
+            )
+
+        @_pause_echo(echo_input)  # type: ignore
+        def visible_input(prompt: t.Optional[str] = None) -> str:
+            sys.stdout.write(prompt or "")
+            val = text_input.readline().rstrip("\r\n")
+            sys.stdout.write(f"{val}\n")
+            sys.stdout.flush()
+            return val
+
+        @_pause_echo(echo_input)  # type: ignore
+        def hidden_input(prompt: t.Optional[str] = None) -> str:
+            sys.stdout.write(f"{prompt or ''}\n")
+            sys.stdout.flush()
+            return text_input.readline().rstrip("\r\n")
+
+        @_pause_echo(echo_input)  # type: ignore
+        def _getchar(echo: bool) -> str:
+            char = sys.stdin.read(1)
+
+            if echo:
+                sys.stdout.write(char)
+
+            sys.stdout.flush()
+            return char
+
+        default_color = color
+
+        def should_strip_ansi(
+            stream: t.Optional[t.IO] = None, color: t.Optional[bool] = None
+        ) -> bool:
+            if color is None:
+                return not default_color
+            return not color
+
+        old_visible_prompt_func = termui.visible_prompt_func
+        old_hidden_prompt_func = termui.hidden_prompt_func
+        old__getchar_func = termui._getchar
+        old_should_strip_ansi = utils.should_strip_ansi  # type: ignore
+        termui.visible_prompt_func = visible_input
+        termui.hidden_prompt_func = hidden_input
+        termui._getchar = _getchar
+        utils.should_strip_ansi = should_strip_ansi  # type: ignore
+
+        old_env = {}
+        try:
+            for key, value in env.items():
+                old_env[key] = os.environ.get(key)
+                if value is None:
+                    try:
+                        del os.environ[key]
+                    except Exception:
+                        pass
+                else:
+                    os.environ[key] = value
+            yield (bytes_output, bytes_error)
+        finally:
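+            # Restore the pre-isolation environment exactly, deleting any
+            # keys that were introduced by the overrides.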
+            for key, value in old_env.items():
+                if value is None:
+                    try:
+                        del os.environ[key]
+                    except Exception:
+                        pass
+                else:
+                    os.environ[key] = value
+            sys.stdout = old_stdout
+            sys.stderr = old_stderr
+            sys.stdin = old_stdin
+            termui.visible_prompt_func = old_visible_prompt_func
+            termui.hidden_prompt_func = old_hidden_prompt_func
+            termui._getchar = old__getchar_func
+            utils.should_strip_ansi = old_should_strip_ansi  # type: ignore
+            formatting.FORCED_WIDTH = old_forced_width
+
+    def invoke(
+        self,
+        cli: "BaseCommand",
+        args: t.Optional[t.Union[str, t.Sequence[str]]] = None,
+        input: t.Optional[t.Union[str, bytes, t.IO]] = None,
+        env: t.Optional[t.Mapping[str, t.Optional[str]]] = None,
+        catch_exceptions: bool = True,
+        color: bool = False,
+        **extra: t.Any,
+    ) -> Result:
+        """Invokes a command in an isolated environment.  The arguments are
+        forwarded directly to the command line script, the `extra` keyword
+        arguments are passed to the :meth:`~clickpkg.Command.main` function of
+        the command.
+
+        This returns a :class:`Result` object.
+
+        :param cli: the command to invoke
+        :param args: the arguments to invoke. It may be given as an iterable
+                     or a string. When given as string it will be interpreted
+                     as a Unix shell command. More details at
+                     :func:`shlex.split`.
+        :param input: the input data for `sys.stdin`.
+        :param env: the environment overrides.
+        :param catch_exceptions: Whether to catch any exceptions other than
+                                 ``SystemExit``.
+        :param extra: the keyword arguments to pass to :meth:`main`.
+        :param color: whether the output should contain color codes. The
+                      application can still override this explicitly.
+
+        .. versionchanged:: 8.0
+            The result object has the ``return_value`` attribute with
+            the value returned from the invoked command.
+
+        .. versionchanged:: 4.0
+            Added the ``color`` parameter.
+
+        .. versionchanged:: 3.0
+            Added the ``catch_exceptions`` parameter.
+
+        .. versionchanged:: 3.0
+            The result object has the ``exc_info`` attribute with the
+            traceback if available.
+        """
+        exc_info = None
+        with self.isolation(input=input, env=env, color=color) as outstreams:
+            return_value = None
+            exception: t.Optional[BaseException] = None
+            exit_code = 0
+
+            if isinstance(args, str):
+                args = shlex.split(args)
+
+            try:
+                prog_name = extra.pop("prog_name")
+            except KeyError:
+                prog_name = self.get_default_prog_name(cli)
+
+            try:
+                return_value = cli.main(args=args or (), prog_name=prog_name, **extra)
+            except SystemExit as e:
+                exc_info = sys.exc_info()
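+                # ``SystemExit.code`` may be None, an int, or an arbitrary
+                # message object; normalize it to an int exit code below.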
+                e_code = t.cast(t.Optional[t.Union[int, t.Any]], e.code)
+
+                if e_code is None:
+                    e_code = 0
+
+                if e_code != 0:
+                    exception = e
+
+                if not isinstance(e_code, int):
+                    sys.stdout.write(str(e_code))
+                    sys.stdout.write("\n")
+                    e_code = 1
+
+                exit_code = e_code
+
+            except Exception as e:
+                if not catch_exceptions:
+                    raise
+                exception = e
+                exit_code = 1
+                exc_info = sys.exc_info()
+            finally:
+                sys.stdout.flush()
+                stdout = outstreams[0].getvalue()
+                if self.mix_stderr:
+                    stderr = None
+                else:
+                    stderr = outstreams[1].getvalue()  # type: ignore
+
+        return Result(
+            runner=self,
+            stdout_bytes=stdout,
+            stderr_bytes=stderr,
+            return_value=return_value,
+            exit_code=exit_code,
+            exception=exception,
+            exc_info=exc_info,  # type: ignore
+        )
+
+    @contextlib.contextmanager
+    def isolated_filesystem(
+        self, temp_dir: t.Optional[t.Union[str, os.PathLike]] = None
+    ) -> t.Iterator[str]:
+        """A context manager that creates a temporary directory and
+        changes the current working directory to it. This isolates tests
+        that affect the contents of the CWD to prevent them from
+        interfering with each other.
+
+        :param temp_dir: Create the temporary directory under this
+            directory. If given, the created directory is not removed
+            when exiting.
+
+        .. versionchanged:: 8.0
+            Added the ``temp_dir`` parameter.
+        """
+        cwd = os.getcwd()
+        dt = tempfile.mkdtemp(dir=temp_dir)  # type: ignore[type-var]
+        os.chdir(dt)
+
+        try:
+            yield t.cast(str, dt)
+        finally:
+            os.chdir(cwd)
+
+            if temp_dir is None:
+                try:
+                    shutil.rmtree(dt)
+                except OSError:  # noqa: B014
+                    pass
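
Reviewer note: typical usage of the ``CliRunner`` defined above, with a
throwaway ``greet`` command for illustration:

    import click
    from click.testing import CliRunner

    @click.command()
    @click.argument("name")
    def greet(name):
        click.echo(f"Hello {name}!")

    runner = CliRunner(mix_stderr=False)  # keep stderr separate from stdout
    with runner.isolated_filesystem():    # run inside a temporary CWD
        result = runner.invoke(greet, ["world"])

    assert result.exit_code == 0
    assert result.output == "Hello world!\n"
    assert result.stderr == ""  # captured separately because mix_stderr=False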
diff --git a/venv/lib/python3.9/site-packages/click/types.py b/venv/lib/python3.9/site-packages/click/types.py
new file mode 100644
index 0000000..b45ee53
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click/types.py
@@ -0,0 +1,1073 @@
+import os
+import stat
+import typing as t
+from datetime import datetime
+from gettext import gettext as _
+from gettext import ngettext
+
+from ._compat import _get_argv_encoding
+from ._compat import get_filesystem_encoding
+from ._compat import open_stream
+from .exceptions import BadParameter
+from .utils import LazyFile
+from .utils import safecall
+
+if t.TYPE_CHECKING:
+    import typing_extensions as te
+    from .core import Context
+    from .core import Parameter
+    from .shell_completion import CompletionItem
+
+
+class ParamType:
+    """Represents the type of a parameter. Validates and converts values
+    from the command line or Python into the correct type.
+
+    To implement a custom type, subclass and implement at least the
+    following:
+
+    -   The :attr:`name` class attribute must be set.
+    -   Calling an instance of the type with ``None`` must return
+        ``None``. This is already implemented by default.
+    -   :meth:`convert` must convert string values to the correct type.
+    -   :meth:`convert` must accept values that are already the correct
+        type.
+    -   It must be able to convert a value if the ``ctx`` and ``param``
+        arguments are ``None``. This can occur when converting prompt
+        input.
+    """
+
+    is_composite: t.ClassVar[bool] = False
+    arity: t.ClassVar[int] = 1
+
+    #: the descriptive name of this type
+    name: str
+
+    #: if a list of this type is expected and the value is pulled from a
+    #: string environment variable, this is what splits it up.  `None`
+    #: means any whitespace.  For all parameters the general rule is that
+    #: whitespace splits them up.  The exceptions are paths and files,
+    #: which are split by ``os.path.pathsep`` by default (":" on Unix and
+    #: ";" on Windows).
+    envvar_list_splitter: t.ClassVar[t.Optional[str]] = None
+
+    def to_info_dict(self) -> t.Dict[str, t.Any]:
+        """Gather information that could be useful for a tool generating
+        user-facing documentation.
+
+        Use :meth:`click.Context.to_info_dict` to traverse the entire
+        CLI structure.
+
+        .. versionadded:: 8.0
+        """
+        # The class name without the "ParamType" suffix.
+        param_type = type(self).__name__.partition("ParamType")[0]
+        param_type = param_type.partition("ParameterType")[0]
+
+        # Custom subclasses might not remember to set a name.
+        if hasattr(self, "name"):
+            name = self.name
+        else:
+            name = param_type
+
+        return {"param_type": param_type, "name": name}
+
+    def __call__(
+        self,
+        value: t.Any,
+        param: t.Optional["Parameter"] = None,
+        ctx: t.Optional["Context"] = None,
+    ) -> t.Any:
+        if value is not None:
+            return self.convert(value, param, ctx)
+
+    def get_metavar(self, param: "Parameter") -> t.Optional[str]:
+        """Returns the metavar default for this param if it provides one."""
+
+    def get_missing_message(self, param: "Parameter") -> t.Optional[str]:
+        """Optionally might return extra information about a missing
+        parameter.
+
+        .. versionadded:: 2.0
+        """
+
+    def convert(
+        self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"]
+    ) -> t.Any:
+        """Convert the value to the correct type. This is not called if
+        the value is ``None`` (the missing value).
+
+        This must accept string values from the command line, as well as
+        values that are already the correct type. It may also convert
+        other compatible types.
+
+        The ``param`` and ``ctx`` arguments may be ``None`` in certain
+        situations, such as when converting prompt input.
+
+        If the value cannot be converted, call :meth:`fail` with a
+        descriptive message.
+
+        :param value: The value to convert.
+        :param param: The parameter that is using this type to convert
+            its value. May be ``None``.
+        :param ctx: The current context that arrived at this value. May
+            be ``None``.
+        """
+        return value
+
+    def split_envvar_value(self, rv: str) -> t.Sequence[str]:
+        """Given a value from an environment variable this splits it up
+        into small chunks depending on the defined envvar list splitter.
+
+        If the splitter is set to `None`, which means that whitespace splits,
+        then leading and trailing whitespace is ignored.  Otherwise, leading
+        and trailing splitters usually lead to empty items being included.
+        """
+        return (rv or "").split(self.envvar_list_splitter)
+
+    def fail(
+        self,
+        message: str,
+        param: t.Optional["Parameter"] = None,
+        ctx: t.Optional["Context"] = None,
+    ) -> "t.NoReturn":
+        """Helper method to fail with an invalid value message."""
+        raise BadParameter(message, ctx=ctx, param=param)
+
+    def shell_complete(
+        self, ctx: "Context", param: "Parameter", incomplete: str
+    ) -> t.List["CompletionItem"]:
+        """Return a list of
+        :class:`~click.shell_completion.CompletionItem` objects for the
+        incomplete value. Most types do not provide completions, but
+        some do, and this allows custom types to provide custom
+        completions as well.
+
+        :param ctx: Invocation context for this command.
+        :param param: The parameter that is requesting completion.
+        :param incomplete: Value being completed. May be empty.
+
+        .. versionadded:: 8.0
+        """
+        return []
+
+
+class CompositeParamType(ParamType):
+    is_composite = True
+
+    @property
+    def arity(self) -> int:  # type: ignore
+        raise NotImplementedError()
+
+
+class FuncParamType(ParamType):
+    def __init__(self, func: t.Callable[[t.Any], t.Any]) -> None:
+        self.name = func.__name__
+        self.func = func
+
+    def to_info_dict(self) -> t.Dict[str, t.Any]:
+        info_dict = super().to_info_dict()
+        info_dict["func"] = self.func
+        return info_dict
+
+    def convert(
+        self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"]
+    ) -> t.Any:
+        try:
+            return self.func(value)
+        except ValueError:
+            try:
+                value = str(value)
+            except UnicodeError:
+                value = value.decode("utf-8", "replace")
+
+            self.fail(value, param, ctx)
+
+
+class UnprocessedParamType(ParamType):
+    name = "text"
+
+    def convert(
+        self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"]
+    ) -> t.Any:
+        return value
+
+    def __repr__(self) -> str:
+        return "UNPROCESSED"
+
+
+class StringParamType(ParamType):
+    name = "text"
+
+    def convert(
+        self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"]
+    ) -> t.Any:
+        if isinstance(value, bytes):
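+            # Decode using the argv encoding first, then fall back to the
+            # filesystem encoding, and finally to UTF-8 with replacement.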
+            enc = _get_argv_encoding()
+            try:
+                value = value.decode(enc)
+            except UnicodeError:
+                fs_enc = get_filesystem_encoding()
+                if fs_enc != enc:
+                    try:
+                        value = value.decode(fs_enc)
+                    except UnicodeError:
+                        value = value.decode("utf-8", "replace")
+                else:
+                    value = value.decode("utf-8", "replace")
+            return value
+        return str(value)
+
+    def __repr__(self) -> str:
+        return "STRING"
+
+
+class Choice(ParamType):
+    """The choice type allows a value to be checked against a fixed set
+    of supported values. All of these values have to be strings.
+
+    You should only pass a list or tuple of choices. Other iterables
+    (like generators) may lead to surprising results.
+
+    The resulting value will always be one of the originally passed choices
+    regardless of ``case_sensitive`` or any ``ctx.token_normalize_func``
+    being specified.
+
+    See :ref:`choice-opts` for an example.
+
+    :param case_sensitive: Set to false to make choices case
+        insensitive. Defaults to true.
+    """
+
+    name = "choice"
+
+    def __init__(self, choices: t.Sequence[str], case_sensitive: bool = True) -> None:
+        self.choices = choices
+        self.case_sensitive = case_sensitive
+
+    def to_info_dict(self) -> t.Dict[str, t.Any]:
+        info_dict = super().to_info_dict()
+        info_dict["choices"] = self.choices
+        info_dict["case_sensitive"] = self.case_sensitive
+        return info_dict
+
+    def get_metavar(self, param: "Parameter") -> str:
+        choices_str = "|".join(self.choices)
+
+        # Use curly braces to indicate a required argument.
+        if param.required and param.param_type_name == "argument":
+            return f"{{{choices_str}}}"
+
+        # Use square brackets to indicate an option or optional argument.
+        return f"[{choices_str}]"
+
+    def get_missing_message(self, param: "Parameter") -> str:
+        return _("Choose from:\n\t{choices}").format(choices=",\n\t".join(self.choices))
+
+    def convert(
+        self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"]
+    ) -> t.Any:
+        # Match through normalization and case sensitivity
+        # first do token_normalize_func, then lowercase
+        # preserve original `value` to produce an accurate message in
+        # `self.fail`
+        normed_value = value
+        normed_choices = {choice: choice for choice in self.choices}
+
+        if ctx is not None and ctx.token_normalize_func is not None:
+            normed_value = ctx.token_normalize_func(value)
+            normed_choices = {
+                ctx.token_normalize_func(normed_choice): original
+                for normed_choice, original in normed_choices.items()
+            }
+
+        if not self.case_sensitive:
+            normed_value = normed_value.casefold()
+            normed_choices = {
+                normed_choice.casefold(): original
+                for normed_choice, original in normed_choices.items()
+            }
+
+        if normed_value in normed_choices:
+            return normed_choices[normed_value]
+
+        choices_str = ", ".join(map(repr, self.choices))
+        self.fail(
+            ngettext(
+                "{value!r} is not {choice}.",
+                "{value!r} is not one of {choices}.",
+                len(self.choices),
+            ).format(value=value, choice=choices_str, choices=choices_str),
+            param,
+            ctx,
+        )
+
+    def __repr__(self) -> str:
+        return f"Choice({list(self.choices)})"
+
+    def shell_complete(
+        self, ctx: "Context", param: "Parameter", incomplete: str
+    ) -> t.List["CompletionItem"]:
+        """Complete choices that start with the incomplete value.
+
+        :param ctx: Invocation context for this command.
+        :param param: The parameter that is requesting completion.
+        :param incomplete: Value being completed. May be empty.
+
+        .. versionadded:: 8.0
+        """
+        from click.shell_completion import CompletionItem
+
+        str_choices = map(str, self.choices)
+
+        if self.case_sensitive:
+            matched = (c for c in str_choices if c.startswith(incomplete))
+        else:
+            incomplete = incomplete.lower()
+            matched = (c for c in str_choices if c.lower().startswith(incomplete))
+
+        return [CompletionItem(c) for c in matched]
+
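A minimal usage sketch (command and option names invented): because ``convert`` maps the normalized token back through ``normed_choices``, a case-insensitive match still returns the original spelling.

    import click

    @click.command()
    @click.option(
        "--compression",
        type=click.Choice(["gzip", "bz2", "none"], case_sensitive=False),
        default="none",
    )
    def pack(compression):
        click.echo(f"using {compression}")

    # e.g. `pack --compression GZIP` prints "using gzip" (the original choice).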
+
+class DateTime(ParamType):
+    """The DateTime type converts date strings into `datetime` objects.
+
+    The format strings which are checked are configurable, but default to some
+    common (non-timezone aware) ISO 8601 formats.
+
+    When specifying *DateTime* formats, you should only pass a list or a tuple.
+    Other iterables, like generators, may lead to surprising results.
+
+    The format strings are processed using ``datetime.strptime``, and this
+    consequently defines the format strings which are allowed.
+
+    Parsing is tried using each format, in order, and the first format which
+    parses successfully is used.
+
+    :param formats: A list or tuple of date format strings, in the order in
+                    which they should be tried. Defaults to
+                    ``'%Y-%m-%d'``, ``'%Y-%m-%dT%H:%M:%S'``,
+                    ``'%Y-%m-%d %H:%M:%S'``.
+    """
+
+    name = "datetime"
+
+    def __init__(self, formats: t.Optional[t.Sequence[str]] = None):
+        self.formats = formats or ["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S", "%Y-%m-%d %H:%M:%S"]
+
+    def to_info_dict(self) -> t.Dict[str, t.Any]:
+        info_dict = super().to_info_dict()
+        info_dict["formats"] = self.formats
+        return info_dict
+
+    def get_metavar(self, param: "Parameter") -> str:
+        return f"[{'|'.join(self.formats)}]"
+
+    def _try_to_convert_date(self, value: t.Any, format: str) -> t.Optional[datetime]:
+        try:
+            return datetime.strptime(value, format)
+        except ValueError:
+            return None
+
+    def convert(
+        self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"]
+    ) -> t.Any:
+        if isinstance(value, datetime):
+            return value
+
+        for format in self.formats:
+            converted = self._try_to_convert_date(value, format)
+
+            if converted is not None:
+                return converted
+
+        formats_str = ", ".join(map(repr, self.formats))
+        self.fail(
+            ngettext(
+                "{value!r} does not match the format {format}.",
+                "{value!r} does not match the formats {formats}.",
+                len(self.formats),
+            ).format(value=value, format=formats_str, formats=formats_str),
+            param,
+            ctx,
+        )
+
+    def __repr__(self) -> str:
+        return "DateTime"
+
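A usage sketch with the default formats (command and option names invented):

    import click

    @click.command()
    @click.option("--since", type=click.DateTime())
    def report(since):
        click.echo(repr(since))

    # `report --since 2022-11-19` -> datetime.datetime(2022, 11, 19, 0, 0)
    # `report --since "2022-11-19 08:30:00"` also parses; `19/11/2022` fails
    # because none of the three default formats match.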
+
+class _NumberParamTypeBase(ParamType):
+    _number_class: t.ClassVar[t.Type]
+
+    def convert(
+        self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"]
+    ) -> t.Any:
+        try:
+            return self._number_class(value)
+        except ValueError:
+            self.fail(
+                _("{value!r} is not a valid {number_type}.").format(
+                    value=value, number_type=self.name
+                ),
+                param,
+                ctx,
+            )
+
+
+class _NumberRangeBase(_NumberParamTypeBase):
+    def __init__(
+        self,
+        min: t.Optional[float] = None,
+        max: t.Optional[float] = None,
+        min_open: bool = False,
+        max_open: bool = False,
+        clamp: bool = False,
+    ) -> None:
+        self.min = min
+        self.max = max
+        self.min_open = min_open
+        self.max_open = max_open
+        self.clamp = clamp
+
+    def to_info_dict(self) -> t.Dict[str, t.Any]:
+        info_dict = super().to_info_dict()
+        info_dict.update(
+            min=self.min,
+            max=self.max,
+            min_open=self.min_open,
+            max_open=self.max_open,
+            clamp=self.clamp,
+        )
+        return info_dict
+
+    def convert(
+        self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"]
+    ) -> t.Any:
+        import operator
+
+        rv = super().convert(value, param, ctx)
+        lt_min: bool = self.min is not None and (
+            operator.le if self.min_open else operator.lt
+        )(rv, self.min)
+        gt_max: bool = self.max is not None and (
+            operator.ge if self.max_open else operator.gt
+        )(rv, self.max)
+
+        if self.clamp:
+            if lt_min:
+                return self._clamp(self.min, 1, self.min_open)  # type: ignore
+
+            if gt_max:
+                return self._clamp(self.max, -1, self.max_open)  # type: ignore
+
+        if lt_min or gt_max:
+            self.fail(
+                _("{value} is not in the range {range}.").format(
+                    value=rv, range=self._describe_range()
+                ),
+                param,
+                ctx,
+            )
+
+        return rv
+
+    def _clamp(self, bound: float, dir: "te.Literal[1, -1]", open: bool) -> float:
+        """Find the valid value to clamp to bound in the given
+        direction.
+
+        :param bound: The boundary value.
+        :param dir: 1 or -1 indicating the direction to move.
+        :param open: If true, the range does not include the bound.
+        """
+        raise NotImplementedError
+
+    def _describe_range(self) -> str:
+        """Describe the range for use in help text."""
+        if self.min is None:
+            op = "<" if self.max_open else "<="
+            return f"x{op}{self.max}"
+
+        if self.max is None:
+            op = ">" if self.min_open else ">="
+            return f"x{op}{self.min}"
+
+        lop = "<" if self.min_open else "<="
+        rop = "<" if self.max_open else "<="
+        return f"{self.min}{lop}x{rop}{self.max}"
+
+    def __repr__(self) -> str:
+        clamp = " clamped" if self.clamp else ""
+        return f"<{type(self).__name__} {self._describe_range()}{clamp}>"
+
+
+class IntParamType(_NumberParamTypeBase):
+    name = "integer"
+    _number_class = int
+
+    def __repr__(self) -> str:
+        return "INT"
+
+
+class IntRange(_NumberRangeBase, IntParamType):
+    """Restrict an :data:`click.INT` value to a range of accepted
+    values. See :ref:`ranges`.
+
+    If ``min`` or ``max`` are not passed, any value is accepted in that
+    direction. If ``min_open`` or ``max_open`` are enabled, the
+    corresponding boundary is not included in the range.
+
+    If ``clamp`` is enabled, a value outside the range is clamped to the
+    boundary instead of failing.
+
+    .. versionchanged:: 8.0
+        Added the ``min_open`` and ``max_open`` parameters.
+    """
+
+    name = "integer range"
+
+    def _clamp(  # type: ignore
+        self, bound: int, dir: "te.Literal[1, -1]", open: bool
+    ) -> int:
+        if not open:
+            return bound
+
+        return bound + dir
+
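Illustrating the clamping rules above by calling ``convert`` directly (passing ``None`` for the param and context works outside a command):

    import click

    percent = click.IntRange(0, 100, clamp=True)
    percent.convert("150", None, None)   # clamps to 100
    percent.convert("-5", None, None)    # clamps to 0

    positive = click.IntRange(0, min_open=True, clamp=True)
    positive.convert("0", None, None)    # open bound: clamps to 0 + 1 == 1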
+
+class FloatParamType(_NumberParamTypeBase):
+    name = "float"
+    _number_class = float
+
+    def __repr__(self) -> str:
+        return "FLOAT"
+
+
+class FloatRange(_NumberRangeBase, FloatParamType):
+    """Restrict a :data:`click.FLOAT` value to a range of accepted
+    values. See :ref:`ranges`.
+
+    If ``min`` or ``max`` are not passed, any value is accepted in that
+    direction. If ``min_open`` or ``max_open`` are enabled, the
+    corresponding boundary is not included in the range.
+
+    If ``clamp`` is enabled, a value outside the range is clamped to the
+    boundary instead of failing. This is not supported if either
+    boundary is marked ``open``.
+
+    .. versionchanged:: 8.0
+        Added the ``min_open`` and ``max_open`` parameters.
+    """
+
+    name = "float range"
+
+    def __init__(
+        self,
+        min: t.Optional[float] = None,
+        max: t.Optional[float] = None,
+        min_open: bool = False,
+        max_open: bool = False,
+        clamp: bool = False,
+    ) -> None:
+        super().__init__(
+            min=min, max=max, min_open=min_open, max_open=max_open, clamp=clamp
+        )
+
+        if (min_open or max_open) and clamp:
+            raise TypeError("Clamping is not supported for open bounds.")
+
+    def _clamp(self, bound: float, dir: "te.Literal[1, -1]", open: bool) -> float:
+        if not open:
+            return bound
+
+        # Could use Python 3.9's math.nextafter here, but clamping an
+        # open float range doesn't seem to be particularly useful. It's
+        # left up to the user to write a callback to do it if needed.
+        raise RuntimeError("Clamping is not supported for open bounds.")
+
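By contrast with ``IntRange`` there is no obvious "next float", so the combination is rejected up front; a sketch:

    import click

    ratio = click.FloatRange(0.0, 1.0, min_open=True)  # 0.0 itself excluded
    ratio.convert("0.25", None, None)  # -> 0.25
    ratio.convert("0", None, None)     # raises click.BadParameter

    # click.FloatRange(0.0, min_open=True, clamp=True) raises TypeError.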
+
+class BoolParamType(ParamType):
+    name = "boolean"
+
+    def convert(
+        self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"]
+    ) -> t.Any:
+        if value in {False, True}:
+            return bool(value)
+
+        norm = value.strip().lower()
+
+        if norm in {"1", "true", "t", "yes", "y", "on"}:
+            return True
+
+        if norm in {"0", "false", "f", "no", "n", "off"}:
+            return False
+
+        self.fail(
+            _("{value!r} is not a valid boolean.").format(value=value), param, ctx
+        )
+
+    def __repr__(self) -> str:
+        return "BOOL"
+
+
+class UUIDParameterType(ParamType):
+    name = "uuid"
+
+    def convert(
+        self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"]
+    ) -> t.Any:
+        import uuid
+
+        if isinstance(value, uuid.UUID):
+            return value
+
+        value = value.strip()
+
+        try:
+            return uuid.UUID(value)
+        except ValueError:
+            self.fail(
+                _("{value!r} is not a valid UUID.").format(value=value), param, ctx
+            )
+
+    def __repr__(self) -> str:
+        return "UUID"
+
+
+class File(ParamType):
+    """Declares a parameter to be a file for reading or writing.  The file
+    is automatically closed once the context tears down (after the command
+    has finished working).
+
+    Files can be opened for reading or writing.  The special value ``-``
+    indicates stdin or stdout depending on the mode.
+
+    By default, the file is opened for reading text data, but it can also be
+    opened in binary mode or for writing.  The encoding parameter can be used
+    to force a specific encoding.
+
+    The `lazy` flag controls whether the file should be opened immediately
+    or upon first IO. The default is to be non-lazy for standard input and
+    output streams as well as files opened for reading, `lazy` otherwise.
+    When opening a file lazily for reading, it is still opened temporarily
+    for validation, but will not be held open until first IO. `lazy` is
+    mainly useful when opening for writing, to avoid creating the file
+    until it is needed.
+
+    Starting with Click 2.0, files can also be opened atomically in which
+    case all writes go into a separate file in the same folder and upon
+    completion the file will be moved over to the original location.  This
+    is useful if a file regularly read by other users is modified.
+
+    See :ref:`file-args` for more information.
+    """
+
+    name = "filename"
+    envvar_list_splitter = os.path.pathsep
+
+    def __init__(
+        self,
+        mode: str = "r",
+        encoding: t.Optional[str] = None,
+        errors: t.Optional[str] = "strict",
+        lazy: t.Optional[bool] = None,
+        atomic: bool = False,
+    ) -> None:
+        self.mode = mode
+        self.encoding = encoding
+        self.errors = errors
+        self.lazy = lazy
+        self.atomic = atomic
+
+    def to_info_dict(self) -> t.Dict[str, t.Any]:
+        info_dict = super().to_info_dict()
+        info_dict.update(mode=self.mode, encoding=self.encoding)
+        return info_dict
+
+    def resolve_lazy_flag(self, value: t.Any) -> bool:
+        if self.lazy is not None:
+            return self.lazy
+        if value == "-":
+            return False
+        elif "w" in self.mode:
+            return True
+        return False
+
+    def convert(
+        self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"]
+    ) -> t.Any:
+        try:
+            if hasattr(value, "read") or hasattr(value, "write"):
+                return value
+
+            lazy = self.resolve_lazy_flag(value)
+
+            if lazy:
+                f: t.IO = t.cast(
+                    t.IO,
+                    LazyFile(
+                        value, self.mode, self.encoding, self.errors, atomic=self.atomic
+                    ),
+                )
+
+                if ctx is not None:
+                    ctx.call_on_close(f.close_intelligently)  # type: ignore
+
+                return f
+
+            f, should_close = open_stream(
+                value, self.mode, self.encoding, self.errors, atomic=self.atomic
+            )
+
+            # If a context is provided, we automatically close the file
+            # at the end of the context execution (or flush out).  If a
+            # context does not exist, it's the caller's responsibility to
+            # properly close the file.  This happens, for instance, when the
+            # type is used with prompts.
+            if ctx is not None:
+                if should_close:
+                    ctx.call_on_close(safecall(f.close))
+                else:
+                    ctx.call_on_close(safecall(f.flush))
+
+            return f
+        except OSError as e:  # noqa: B014
+            self.fail(f"'{os.fsdecode(value)}': {e.strerror}", param, ctx)
+
+    def shell_complete(
+        self, ctx: "Context", param: "Parameter", incomplete: str
+    ) -> t.List["CompletionItem"]:
+        """Return a special completion marker that tells the completion
+        system to use the shell to provide file path completions.
+
+        :param ctx: Invocation context for this command.
+        :param param: The parameter that is requesting completion.
+        :param incomplete: Value being completed. May be empty.
+
+        .. versionadded:: 8.0
+        """
+        from click.shell_completion import CompletionItem
+
+        return [CompletionItem(incomplete, type="file")]
+
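A usage sketch tying these pieces together (command name invented): ``-`` maps to the standard streams, and ``lazy``/``atomic`` defer and stage the write as described above.

    import click

    @click.command()
    @click.argument("src", type=click.File("r"))
    @click.argument("dst", type=click.File("w", lazy=True, atomic=True))
    def copy(src, dst):
        # SRC may be `-` for stdin; DST is only created on first write and
        # is moved into place atomically when the context tears down.
        dst.write(src.read())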
+
+class Path(ParamType):
+    """The ``Path`` type is similar to the :class:`File` type, but
+    returns the filename instead of an open file. Various checks can be
+    enabled to validate the type of file and permissions.
+
+    :param exists: The file or directory needs to exist for the value to
+        be valid. If this is not set to ``True``, and the file does not
+        exist, then all further checks are silently skipped.
+    :param file_okay: Allow a file as a value.
+    :param dir_okay: Allow a directory as a value.
+    :param readable: If true, a readable check is performed.
+    :param writable: If true, a writable check is performed.
+    :param executable: If true, an executable check is performed.
+    :param resolve_path: Make the value absolute and resolve any
+        symlinks. A ``~`` is not expanded, as this is supposed to be
+        done by the shell only.
+    :param allow_dash: Allow a single dash as a value, which indicates
+        a standard stream (but does not open it). Use
+        :func:`~click.open_file` to handle opening this value.
+    :param path_type: Convert the incoming path value to this type. If
+        ``None``, keep Python's default, which is ``str``. Useful to
+        convert to :class:`pathlib.Path`.
+
+    .. versionchanged:: 8.1
+        Added the ``executable`` parameter.
+
+    .. versionchanged:: 8.0
+        Allow passing ``type=pathlib.Path``.
+
+    .. versionchanged:: 6.0
+        Added the ``allow_dash`` parameter.
+    """
+
+    envvar_list_splitter = os.path.pathsep
+
+    def __init__(
+        self,
+        exists: bool = False,
+        file_okay: bool = True,
+        dir_okay: bool = True,
+        writable: bool = False,
+        readable: bool = True,
+        resolve_path: bool = False,
+        allow_dash: bool = False,
+        path_type: t.Optional[t.Type] = None,
+        executable: bool = False,
+    ):
+        self.exists = exists
+        self.file_okay = file_okay
+        self.dir_okay = dir_okay
+        self.readable = readable
+        self.writable = writable
+        self.executable = executable
+        self.resolve_path = resolve_path
+        self.allow_dash = allow_dash
+        self.type = path_type
+
+        if self.file_okay and not self.dir_okay:
+            self.name = _("file")
+        elif self.dir_okay and not self.file_okay:
+            self.name = _("directory")
+        else:
+            self.name = _("path")
+
+    def to_info_dict(self) -> t.Dict[str, t.Any]:
+        info_dict = super().to_info_dict()
+        info_dict.update(
+            exists=self.exists,
+            file_okay=self.file_okay,
+            dir_okay=self.dir_okay,
+            writable=self.writable,
+            readable=self.readable,
+            allow_dash=self.allow_dash,
+        )
+        return info_dict
+
+    def coerce_path_result(self, rv: t.Any) -> t.Any:
+        if self.type is not None and not isinstance(rv, self.type):
+            if self.type is str:
+                rv = os.fsdecode(rv)
+            elif self.type is bytes:
+                rv = os.fsencode(rv)
+            else:
+                rv = self.type(rv)
+
+        return rv
+
+    def convert(
+        self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"]
+    ) -> t.Any:
+        rv = value
+
+        is_dash = self.file_okay and self.allow_dash and rv in (b"-", "-")
+
+        if not is_dash:
+            if self.resolve_path:
+                # os.path.realpath doesn't resolve symlinks on Windows
+                # until Python 3.8. Use pathlib for now.
+                import pathlib
+
+                rv = os.fsdecode(pathlib.Path(rv).resolve())
+
+            try:
+                st = os.stat(rv)
+            except OSError:
+                if not self.exists:
+                    return self.coerce_path_result(rv)
+                self.fail(
+                    _("{name} {filename!r} does not exist.").format(
+                        name=self.name.title(), filename=os.fsdecode(value)
+                    ),
+                    param,
+                    ctx,
+                )
+
+            if not self.file_okay and stat.S_ISREG(st.st_mode):
+                self.fail(
+                    _("{name} {filename!r} is a file.").format(
+                        name=self.name.title(), filename=os.fsdecode(value)
+                    ),
+                    param,
+                    ctx,
+                )
+            if not self.dir_okay and stat.S_ISDIR(st.st_mode):
+                self.fail(
+                    _("{name} '{filename}' is a directory.").format(
+                        name=self.name.title(), filename=os.fsdecode(value)
+                    ),
+                    param,
+                    ctx,
+                )
+
+            if self.readable and not os.access(rv, os.R_OK):
+                self.fail(
+                    _("{name} {filename!r} is not readable.").format(
+                        name=self.name.title(), filename=os.fsdecode(value)
+                    ),
+                    param,
+                    ctx,
+                )
+
+            if self.writable and not os.access(rv, os.W_OK):
+                self.fail(
+                    _("{name} {filename!r} is not writable.").format(
+                        name=self.name.title(), filename=os.fsdecode(value)
+                    ),
+                    param,
+                    ctx,
+                )
+
+            if self.executable and not os.access(rv, os.X_OK):
+                self.fail(
+                    _("{name} {filename!r} is not executable.").format(
+                        name=self.name.title(), filename=os.fsdecode(value)
+                    ),
+                    param,
+                    ctx,
+                )
+
+        return self.coerce_path_result(rv)
+
+    def shell_complete(
+        self, ctx: "Context", param: "Parameter", incomplete: str
+    ) -> t.List["CompletionItem"]:
+        """Return a special completion marker that tells the completion
+        system to use the shell to provide path completions for only
+        directories or any paths.
+
+        :param ctx: Invocation context for this command.
+        :param param: The parameter that is requesting completion.
+        :param incomplete: Value being completed. May be empty.
+
+        .. versionadded:: 8.0
+        """
+        from click.shell_completion import CompletionItem
+
+        type = "dir" if self.dir_okay and not self.file_okay else "file"
+        return [CompletionItem(incomplete, type=type)]
+
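A usage sketch (argument name invented) combining the checks with ``path_type`` so the command receives a ``pathlib.Path``:

    import pathlib
    import click

    @click.command()
    @click.argument(
        "config",
        type=click.Path(exists=True, dir_okay=False, path_type=pathlib.Path),
    )
    def show(config):
        click.echo(config.read_text())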
+
+class Tuple(CompositeParamType):
+    """The default behavior of Click is to apply a type on a value directly.
+    This works well in most cases, except for when `nargs` is set to a fixed
+    count and different types should be used for different items.  In this
+    case the :class:`Tuple` type can be used.  This type can only be used
+    if `nargs` is set to a fixed number.
+
+    For more information see :ref:`tuple-type`.
+
+    This can be selected by using a Python tuple literal as a type.
+
+    :param types: a list of types that should be used for the tuple items.
+    """
+
+    def __init__(self, types: t.Sequence[t.Union[t.Type, ParamType]]) -> None:
+        self.types = [convert_type(ty) for ty in types]
+
+    def to_info_dict(self) -> t.Dict[str, t.Any]:
+        info_dict = super().to_info_dict()
+        info_dict["types"] = [t.to_info_dict() for t in self.types]
+        return info_dict
+
+    @property
+    def name(self) -> str:  # type: ignore
+        return f"<{' '.join(ty.name for ty in self.types)}>"
+
+    @property
+    def arity(self) -> int:  # type: ignore
+        return len(self.types)
+
+    def convert(
+        self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"]
+    ) -> t.Any:
+        len_type = len(self.types)
+        len_value = len(value)
+
+        if len_value != len_type:
+            self.fail(
+                ngettext(
+                    "{len_type} values are required, but {len_value} was given.",
+                    "{len_type} values are required, but {len_value} were given.",
+                    len_value,
+                ).format(len_type=len_type, len_value=len_value),
+                param=param,
+                ctx=ctx,
+            )
+
+        return tuple(ty(x, param, ctx) for ty, x in zip(self.types, value))
+
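A usage sketch: passing a Python tuple literal as the ``type`` selects ``Tuple`` and fixes the arity to its length (names invented):

    import click

    @click.command()
    @click.option("--item", type=(str, int))  # arity 2: STRING then INT
    def put(item):
        name, count = item
        click.echo(f"{name}={count}")

    # `put --item widgets 4` -> "widgets=4" (count arrives as an int)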
+
+def convert_type(ty: t.Optional[t.Any], default: t.Optional[t.Any] = None) -> ParamType:
+    """Find the most appropriate :class:`ParamType` for the given Python
+    type. If the type isn't provided, it can be inferred from a default
+    value.
+    """
+    guessed_type = False
+
+    if ty is None and default is not None:
+        if isinstance(default, (tuple, list)):
+            # If the default is empty, ty will remain None and will
+            # return STRING.
+            if default:
+                item = default[0]
+
+                # A tuple of tuples needs to detect the inner types.
+                # Can't call convert recursively because that would
+                # incorrectly unwind the tuple to a single type.
+                if isinstance(item, (tuple, list)):
+                    ty = tuple(map(type, item))
+                else:
+                    ty = type(item)
+        else:
+            ty = type(default)
+
+        guessed_type = True
+
+    if isinstance(ty, tuple):
+        return Tuple(ty)
+
+    if isinstance(ty, ParamType):
+        return ty
+
+    if ty is str or ty is None:
+        return STRING
+
+    if ty is int:
+        return INT
+
+    if ty is float:
+        return FLOAT
+
+    if ty is bool:
+        return BOOL
+
+    if guessed_type:
+        return STRING
+
+    if __debug__:
+        try:
+            if issubclass(ty, ParamType):
+                raise AssertionError(
+                    f"Attempted to use an uninstantiated parameter type ({ty})."
+                )
+        except TypeError:
+            # ty is an instance (correct), so issubclass fails.
+            pass
+
+    return FuncParamType(ty)
+
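The inference rules above can be exercised directly; a sketch:

    from click.types import convert_type, INT, STRING, Tuple

    assert convert_type(int) is INT
    assert convert_type(None, default=3) is INT         # guessed from default
    assert convert_type(None, default="x") is STRING
    assert isinstance(convert_type(None, default=[("a", 1)]), Tuple)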
+
+#: A dummy parameter type that just does nothing.  From a user's
+#: perspective this appears to just be the same as `STRING` but
+#: internally no string conversion takes place if the input was bytes.
+#: This is usually useful when working with file paths as they can
+#: appear in bytes and unicode.
+#:
+#: For path related uses the :class:`Path` type is a better choice but
+#: there are situations where an unprocessed type is useful, which is why
+#: it is provided.
+#:
+#: .. versionadded:: 4.0
+UNPROCESSED = UnprocessedParamType()
+
+#: A unicode string parameter type which is the implicit default.  This
+#: can also be selected by using ``str`` as type.
+STRING = StringParamType()
+
+#: An integer parameter.  This can also be selected by using ``int`` as
+#: type.
+INT = IntParamType()
+
+#: A floating point value parameter.  This can also be selected by using
+#: ``float`` as type.
+FLOAT = FloatParamType()
+
+#: A boolean parameter.  This is the default for boolean flags.  This can
+#: also be selected by using ``bool`` as a type.
+BOOL = BoolParamType()
+
+#: A UUID parameter.
+UUID = UUIDParameterType()
diff --git a/venv/lib/python3.9/site-packages/click/utils.py b/venv/lib/python3.9/site-packages/click/utils.py
new file mode 100644
index 0000000..8283788
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/click/utils.py
@@ -0,0 +1,580 @@
+import os
+import re
+import sys
+import typing as t
+from functools import update_wrapper
+from types import ModuleType
+
+from ._compat import _default_text_stderr
+from ._compat import _default_text_stdout
+from ._compat import _find_binary_writer
+from ._compat import auto_wrap_for_ansi
+from ._compat import binary_streams
+from ._compat import get_filesystem_encoding
+from ._compat import open_stream
+from ._compat import should_strip_ansi
+from ._compat import strip_ansi
+from ._compat import text_streams
+from ._compat import WIN
+from .globals import resolve_color_default
+
+if t.TYPE_CHECKING:
+    import typing_extensions as te
+
+F = t.TypeVar("F", bound=t.Callable[..., t.Any])
+
+
+def _posixify(name: str) -> str:
+    return "-".join(name.split()).lower()
+
+
+def safecall(func: F) -> F:
+    """Wraps a function so that it swallows exceptions."""
+
+    def wrapper(*args, **kwargs):  # type: ignore
+        try:
+            return func(*args, **kwargs)
+        except Exception:
+            pass
+
+    return update_wrapper(t.cast(F, wrapper), func)
+
+
+def make_str(value: t.Any) -> str:
+    """Converts a value into a valid string."""
+    if isinstance(value, bytes):
+        try:
+            return value.decode(get_filesystem_encoding())
+        except UnicodeError:
+            return value.decode("utf-8", "replace")
+    return str(value)
+
+
+def make_default_short_help(help: str, max_length: int = 45) -> str:
+    """Returns a condensed version of help string."""
+    # Consider only the first paragraph.
+    paragraph_end = help.find("\n\n")
+
+    if paragraph_end != -1:
+        help = help[:paragraph_end]
+
+    # Collapse newlines, tabs, and spaces.
+    words = help.split()
+
+    if not words:
+        return ""
+
+    # The first paragraph started with a "no rewrap" marker; ignore it.
+    if words[0] == "\b":
+        words = words[1:]
+
+    total_length = 0
+    last_index = len(words) - 1
+
+    for i, word in enumerate(words):
+        total_length += len(word) + (i > 0)
+
+        if total_length > max_length:  # too long, truncate
+            break
+
+        if word[-1] == ".":  # sentence end, truncate without "..."
+            return " ".join(words[: i + 1])
+
+        if total_length == max_length and i != last_index:
+            break  # not at sentence end, truncate with "..."
+    else:
+        return " ".join(words)  # no truncation needed
+
+    # Account for the length of the suffix.
+    total_length += len("...")
+
+    # remove words until the length is short enough
+    while i > 0:
+        total_length -= len(words[i]) + (i > 0)
+
+        if total_length <= max_length:
+            break
+
+        i -= 1
+
+    return " ".join(words[:i]) + "..."
+
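A sketch of the truncation behavior (input strings invented):

    from click.utils import make_default_short_help

    make_default_short_help("Fetch data. Then process it.")
    # -> "Fetch data."  (stops cleanly at the first sentence end)

    make_default_short_help("word " * 30, max_length=20)
    # -> "word word word..."  (words dropped until the suffix fits)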
+
+class LazyFile:
+    """A lazy file works like a regular file but it does not fully open
+    the file but it does perform some basic checks early to see if the
+    filename parameter does make sense.  This is useful for safely opening
+    files for writing.
+    """
+
+    def __init__(
+        self,
+        filename: str,
+        mode: str = "r",
+        encoding: t.Optional[str] = None,
+        errors: t.Optional[str] = "strict",
+        atomic: bool = False,
+    ):
+        self.name = filename
+        self.mode = mode
+        self.encoding = encoding
+        self.errors = errors
+        self.atomic = atomic
+        self._f: t.Optional[t.IO]
+
+        if filename == "-":
+            self._f, self.should_close = open_stream(filename, mode, encoding, errors)
+        else:
+            if "r" in mode:
+                # Open and close the file when opening for reading so that
+                # we can catch at least some errors early.
+                open(filename, mode).close()
+            self._f = None
+            self.should_close = True
+
+    def __getattr__(self, name: str) -> t.Any:
+        return getattr(self.open(), name)
+
+    def __repr__(self) -> str:
+        if self._f is not None:
+            return repr(self._f)
+        return f"<unopened file '{self.name}' {self.mode}>"
+
+    def open(self) -> t.IO:
+        """Opens the file if it's not yet open.  This call might fail with
+        a :exc:`FileError`.  Not handling this error will produce an error
+        that Click shows.
+        """
+        if self._f is not None:
+            return self._f
+        try:
+            rv, self.should_close = open_stream(
+                self.name, self.mode, self.encoding, self.errors, atomic=self.atomic
+            )
+        except OSError as e:  # noqa: E402
+            from .exceptions import FileError
+
+            raise FileError(self.name, hint=e.strerror) from e
+        self._f = rv
+        return rv
+
+    def close(self) -> None:
+        """Closes the underlying file, no matter what."""
+        if self._f is not None:
+            self._f.close()
+
+    def close_intelligently(self) -> None:
+        """This function only closes the file if it was opened by the lazy
+        file wrapper.  For instance this will never close stdin.
+        """
+        if self.should_close:
+            self.close()
+
+    def __enter__(self) -> "LazyFile":
+        return self
+
+    def __exit__(self, exc_type, exc_value, tb):  # type: ignore
+        self.close_intelligently()
+
+    def __iter__(self) -> t.Iterator[t.AnyStr]:
+        self.open()
+        return iter(self._f)  # type: ignore
+
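A sketch of the lazy behavior (filename invented):

    from click.utils import LazyFile

    log = LazyFile("out.log", "w")   # nothing on disk yet
    repr(log)                        # "<unopened file 'out.log' w>"
    log.write("hello\n")             # first IO opens (and creates) the file
    log.close_intelligently()        # closes it; would leave stdin/stdout open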
+
+class KeepOpenFile:
+    def __init__(self, file: t.IO) -> None:
+        self._file = file
+
+    def __getattr__(self, name: str) -> t.Any:
+        return getattr(self._file, name)
+
+    def __enter__(self) -> "KeepOpenFile":
+        return self
+
+    def __exit__(self, exc_type, exc_value, tb):  # type: ignore
+        pass
+
+    def __repr__(self) -> str:
+        return repr(self._file)
+
+    def __iter__(self) -> t.Iterator[t.AnyStr]:
+        return iter(self._file)
+
+
+def echo(
+    message: t.Optional[t.Any] = None,
+    file: t.Optional[t.IO[t.Any]] = None,
+    nl: bool = True,
+    err: bool = False,
+    color: t.Optional[bool] = None,
+) -> None:
+    """Print a message and newline to stdout or a file. This should be
+    used instead of :func:`print` because it provides better support
+    for different data, files, and environments.
+
+    Compared to :func:`print`, this does the following:
+
+    -   Ensures that the output encoding is not misconfigured on Linux.
+    -   Supports Unicode in the Windows console.
+    -   Supports writing to binary outputs, and supports writing bytes
+        to text outputs.
+    -   Supports colors and styles on Windows.
+    -   Removes ANSI color and style codes if the output does not look
+        like an interactive terminal.
+    -   Always flushes the output.
+
+    :param message: The string or bytes to output. Other objects are
+        converted to strings.
+    :param file: The file to write to. Defaults to ``stdout``.
+    :param err: Write to ``stderr`` instead of ``stdout``.
+    :param nl: Print a newline after the message. Enabled by default.
+    :param color: Force showing or hiding colors and other styles. By
+        default Click will remove color if the output does not look like
+        an interactive terminal.
+
+    .. versionchanged:: 6.0
+        Support Unicode output on the Windows console. Click does not
+        modify ``sys.stdout``, so ``sys.stdout.write()`` and ``print()``
+        will still not support Unicode.
+
+    .. versionchanged:: 4.0
+        Added the ``color`` parameter.
+
+    .. versionadded:: 3.0
+        Added the ``err`` parameter.
+
+    .. versionchanged:: 2.0
+        Support colors on Windows if colorama is installed.
+    """
+    if file is None:
+        if err:
+            file = _default_text_stderr()
+        else:
+            file = _default_text_stdout()
+
+    # Convert non-bytes/text into the native string type.
+    if message is not None and not isinstance(message, (str, bytes, bytearray)):
+        out: t.Optional[t.Union[str, bytes]] = str(message)
+    else:
+        out = message
+
+    if nl:
+        out = out or ""
+        if isinstance(out, str):
+            out += "\n"
+        else:
+            out += b"\n"
+
+    if not out:
+        file.flush()
+        return
+
+    # If there is a message and the value looks like bytes, we manually
+    # need to find the binary stream and write the message in there.
+    # This is done separately so that most stream types will work as you
+    # would expect. Eg: you can write to StringIO for other cases.
+    if isinstance(out, (bytes, bytearray)):
+        binary_file = _find_binary_writer(file)
+
+        if binary_file is not None:
+            file.flush()
+            binary_file.write(out)
+            binary_file.flush()
+            return
+
+    # ANSI style code support. For no message or bytes, nothing happens.
+    # When outputting to a file instead of a terminal, strip codes.
+    else:
+        color = resolve_color_default(color)
+
+        if should_strip_ansi(file, color):
+            out = strip_ansi(out)
+        elif WIN:
+            if auto_wrap_for_ansi is not None:
+                file = auto_wrap_for_ansi(file)  # type: ignore
+            elif not color:
+                out = strip_ansi(out)
+
+    file.write(out)  # type: ignore
+    file.flush()
+
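A few representative calls (messages invented):

    import click

    click.echo("processing...")                 # newline appended, flushed
    click.echo(b"\x89PNG...", nl=False)         # routed to the binary stream
    click.echo("something failed", err=True)    # written to stderr
    click.echo(click.style("ok", fg="green"))   # styles stripped when piped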
+
+def get_binary_stream(name: "te.Literal['stdin', 'stdout', 'stderr']") -> t.BinaryIO:
+    """Returns a system stream for byte processing.
+
+    :param name: the name of the stream to open.  Valid names are ``'stdin'``,
+                 ``'stdout'`` and ``'stderr'``
+    """
+    opener = binary_streams.get(name)
+    if opener is None:
+        raise TypeError(f"Unknown standard stream '{name}'")
+    return opener()
+
+
+def get_text_stream(
+    name: "te.Literal['stdin', 'stdout', 'stderr']",
+    encoding: t.Optional[str] = None,
+    errors: t.Optional[str] = "strict",
+) -> t.TextIO:
+    """Returns a system stream for text processing.  This usually returns
+    a wrapped stream around a binary stream returned from
+    :func:`get_binary_stream` but it also can take shortcuts for already
+    correctly configured streams.
+
+    :param name: the name of the stream to open.  Valid names are ``'stdin'``,
+                 ``'stdout'`` and ``'stderr'``
+    :param encoding: overrides the detected default encoding.
+    :param errors: overrides the default error mode.
+    """
+    opener = text_streams.get(name)
+    if opener is None:
+        raise TypeError(f"Unknown standard stream '{name}'")
+    return opener(encoding, errors)
+
+
+def open_file(
+    filename: str,
+    mode: str = "r",
+    encoding: t.Optional[str] = None,
+    errors: t.Optional[str] = "strict",
+    lazy: bool = False,
+    atomic: bool = False,
+) -> t.IO:
+    """Open a file, with extra behavior to handle ``'-'`` to indicate
+    a standard stream, lazy open on write, and atomic write. Similar to
+    the behavior of the :class:`~click.File` param type.
+
+    If ``'-'`` is given to open ``stdout`` or ``stdin``, the stream is
+    wrapped so that using it in a context manager will not close it.
+    This makes it possible to use the function without accidentally
+    closing a standard stream:
+
+    .. code-block:: python
+
+        with open_file(filename) as f:
+            ...
+
+    :param filename: The name of the file to open, or ``'-'`` for
+        ``stdin``/``stdout``.
+    :param mode: The mode in which to open the file.
+    :param encoding: The encoding to decode or encode a file opened in
+        text mode.
+    :param errors: The error handling mode.
+    :param lazy: Wait to open the file until it is accessed. For read
+        mode, the file is temporarily opened to raise access errors
+        early, then closed until it is read again.
+    :param atomic: Write to a temporary file and replace the given file
+        on close.
+
+    .. versionadded:: 3.0
+    """
+    if lazy:
+        return t.cast(t.IO, LazyFile(filename, mode, encoding, errors, atomic=atomic))
+
+    f, should_close = open_stream(filename, mode, encoding, errors, atomic=atomic)
+
+    if not should_close:
+        f = t.cast(t.IO, KeepOpenFile(f))
+
+    return f
+
+
+def format_filename(
+    filename: t.Union[str, bytes, os.PathLike], shorten: bool = False
+) -> str:
+    """Formats a filename for user display.  The main purpose of this
+    function is to ensure that the filename can be displayed at all.  This
+    will decode the filename to unicode if necessary in a way that it will
+    not fail.  Optionally, it can shorten the filename to not include the
+    full path to the filename.
+
+    :param filename: formats a filename for UI display.  This will also convert
+                     the filename into unicode without failing.
+    :param shorten: this optionally shortens the filename to strip of the
+                    path that leads up to it.
+    """
+    if shorten:
+        filename = os.path.basename(filename)
+
+    return os.fsdecode(filename)
+
+
+def get_app_dir(app_name: str, roaming: bool = True, force_posix: bool = False) -> str:
+    r"""Returns the config folder for the application.  The default behavior
+    is to return whatever is most appropriate for the operating system.
+
+    To give you an idea, for an app called ``"Foo Bar"``, something like
+    the following folders could be returned:
+
+    Mac OS X:
+      ``~/Library/Application Support/Foo Bar``
+    Mac OS X (POSIX):
+      ``~/.foo-bar``
+    Unix:
+      ``~/.config/foo-bar``
+    Unix (POSIX):
+      ``~/.foo-bar``
+    Windows (roaming):
+      ``C:\Users\<user>\AppData\Roaming\Foo Bar``
+    Windows (not roaming):
+      ``C:\Users\<user>\AppData\Local\Foo Bar``
+
+    .. versionadded:: 2.0
+
+    :param app_name: the application name.  This should be properly capitalized
+                     and can contain whitespace.
+    :param roaming: controls whether the folder should be roaming on Windows.
+                    Has no effect otherwise.
+    :param force_posix: if this is set to `True` then on any POSIX system the
+                        folder will be stored in the home folder with a leading
+                        dot instead of the XDG config home or darwin's
+                        application support folder.
+    """
+    if WIN:
+        key = "APPDATA" if roaming else "LOCALAPPDATA"
+        folder = os.environ.get(key)
+        if folder is None:
+            folder = os.path.expanduser("~")
+        return os.path.join(folder, app_name)
+    if force_posix:
+        return os.path.join(os.path.expanduser(f"~/.{_posixify(app_name)}"))
+    if sys.platform == "darwin":
+        return os.path.join(
+            os.path.expanduser("~/Library/Application Support"), app_name
+        )
+    return os.path.join(
+        os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")),
+        _posixify(app_name),
+    )
+
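A usage sketch (app name invented):

    import os
    import click

    cfg_dir = click.get_app_dir("Foo Bar")
    cfg_path = os.path.join(cfg_dir, "config.ini")
    # e.g. ~/.config/foo-bar/config.ini on Linux, honoring $XDG_CONFIG_HOME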
+
+class PacifyFlushWrapper:
+    """This wrapper is used to catch and suppress BrokenPipeErrors resulting
+    from ``.flush()`` being called on a broken pipe during the shutdown/final-GC
+    of the Python interpreter. Notably ``.flush()`` is always called on
+    ``sys.stdout`` and ``sys.stderr``. So as to have minimal impact on any
+    other cleanup code, and the case where the underlying file is not a broken
+    pipe, all calls and attributes are proxied.
+    """
+
+    def __init__(self, wrapped: t.IO) -> None:
+        self.wrapped = wrapped
+
+    def flush(self) -> None:
+        try:
+            self.wrapped.flush()
+        except OSError as e:
+            import errno
+
+            if e.errno != errno.EPIPE:
+                raise
+
+    def __getattr__(self, attr: str) -> t.Any:
+        return getattr(self.wrapped, attr)
+
+
+def _detect_program_name(
+    path: t.Optional[str] = None, _main: t.Optional[ModuleType] = None
+) -> str:
+    """Determine the command used to run the program, for use in help
+    text. If a file or entry point was executed, the file name is
+    returned. If ``python -m`` was used to execute a module or package,
+    ``python -m name`` is returned.
+
+    This doesn't try to be too precise; the goal is to give a concise
+    name for help text. Files are only shown as their name without the
+    path. ``python`` is only shown for modules, and the full path to
+    ``sys.executable`` is not shown.
+
+    :param path: The Python file being executed. Python puts this in
+        ``sys.argv[0]``, which is used by default.
+    :param _main: The ``__main__`` module. This should only be passed
+        during internal testing.
+
+    .. versionadded:: 8.0
+        Based on command args detection in the Werkzeug reloader.
+
+    :meta private:
+    """
+    if _main is None:
+        _main = sys.modules["__main__"]
+
+    if not path:
+        path = sys.argv[0]
+
+    # The value of __package__ indicates how Python was called. It may
+    # not exist if a setuptools script is installed as an egg. It may be
+    # set incorrectly for entry points created with pip on Windows.
+    if getattr(_main, "__package__", None) is None or (
+        os.name == "nt"
+        and _main.__package__ == ""
+        and not os.path.exists(path)
+        and os.path.exists(f"{path}.exe")
+    ):
+        # Executed a file, like "python app.py".
+        return os.path.basename(path)
+
+    # Executed a module, like "python -m example".
+    # Rewritten by Python from "-m script" to "/path/to/script.py".
+    # Need to look at main module to determine how it was executed.
+    py_module = t.cast(str, _main.__package__)
+    name = os.path.splitext(os.path.basename(path))[0]
+
+    # A submodule like "example.cli".
+    if name != "__main__":
+        py_module = f"{py_module}.{name}"
+
+    return f"python -m {py_module.lstrip('.')}"
+
+
+def _expand_args(
+    args: t.Iterable[str],
+    *,
+    user: bool = True,
+    env: bool = True,
+    glob_recursive: bool = True,
+) -> t.List[str]:
+    """Simulate Unix shell expansion with Python functions.
+
+    See :func:`glob.glob`, :func:`os.path.expanduser`, and
+    :func:`os.path.expandvars`.
+
+    This is intended for use on Windows, where the shell does not do any
+    expansion. It may not exactly match what a Unix shell would do.
+
+    :param args: List of command line arguments to expand.
+    :param user: Expand user home directory.
+    :param env: Expand environment variables.
+    :param glob_recursive: ``**`` matches directories recursively.
+
+    .. versionchanged:: 8.1
+        Invalid glob patterns are treated as empty expansions rather
+        than raising an error.
+
+    .. versionadded:: 8.0
+
+    :meta private:
+    """
+    from glob import glob
+
+    out = []
+
+    for arg in args:
+        if user:
+            arg = os.path.expanduser(arg)
+
+        if env:
+            arg = os.path.expandvars(arg)
+
+        try:
+            matches = glob(arg, recursive=glob_recursive)
+        except re.error:
+            matches = []
+
+        if not matches:
+            out.append(arg)
+        else:
+            out.extend(matches)
+
+    return out
diff --git a/venv/lib/python3.9/site-packages/easy_install.py b/venv/lib/python3.9/site-packages/easy_install.py
new file mode 100644
index 0000000..d87e984
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/easy_install.py
@@ -0,0 +1,5 @@
+"""Run the EasyInstall command"""
+
+if __name__ == '__main__':
+    from setuptools.command.easy_install import main
+    main()
diff --git a/venv/lib/python3.9/site-packages/jinja2/__init__.py b/venv/lib/python3.9/site-packages/jinja2/__init__.py
new file mode 100644
index 0000000..e323926
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/__init__.py
@@ -0,0 +1,37 @@
+"""Jinja is a template engine written in pure Python. It provides a
+non-XML syntax that supports inline expressions and an optional
+sandboxed environment.
+"""
+from .bccache import BytecodeCache as BytecodeCache
+from .bccache import FileSystemBytecodeCache as FileSystemBytecodeCache
+from .bccache import MemcachedBytecodeCache as MemcachedBytecodeCache
+from .environment import Environment as Environment
+from .environment import Template as Template
+from .exceptions import TemplateAssertionError as TemplateAssertionError
+from .exceptions import TemplateError as TemplateError
+from .exceptions import TemplateNotFound as TemplateNotFound
+from .exceptions import TemplateRuntimeError as TemplateRuntimeError
+from .exceptions import TemplatesNotFound as TemplatesNotFound
+from .exceptions import TemplateSyntaxError as TemplateSyntaxError
+from .exceptions import UndefinedError as UndefinedError
+from .loaders import BaseLoader as BaseLoader
+from .loaders import ChoiceLoader as ChoiceLoader
+from .loaders import DictLoader as DictLoader
+from .loaders import FileSystemLoader as FileSystemLoader
+from .loaders import FunctionLoader as FunctionLoader
+from .loaders import ModuleLoader as ModuleLoader
+from .loaders import PackageLoader as PackageLoader
+from .loaders import PrefixLoader as PrefixLoader
+from .runtime import ChainableUndefined as ChainableUndefined
+from .runtime import DebugUndefined as DebugUndefined
+from .runtime import make_logging_undefined as make_logging_undefined
+from .runtime import StrictUndefined as StrictUndefined
+from .runtime import Undefined as Undefined
+from .utils import clear_caches as clear_caches
+from .utils import is_undefined as is_undefined
+from .utils import pass_context as pass_context
+from .utils import pass_environment as pass_environment
+from .utils import pass_eval_context as pass_eval_context
+from .utils import select_autoescape as select_autoescape
+
+__version__ = "3.1.2"
diff --git a/venv/lib/python3.9/site-packages/jinja2/__pycache__/__init__.cpython-39.pyc b/venv/lib/python3.9/site-packages/jinja2/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..b002984
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/__pycache__/__init__.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/jinja2/__pycache__/_identifier.cpython-39.pyc b/venv/lib/python3.9/site-packages/jinja2/__pycache__/_identifier.cpython-39.pyc
new file mode 100644
index 0000000..ad41076
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/__pycache__/_identifier.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/jinja2/__pycache__/async_utils.cpython-39.pyc b/venv/lib/python3.9/site-packages/jinja2/__pycache__/async_utils.cpython-39.pyc
new file mode 100644
index 0000000..3f41c53
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/__pycache__/async_utils.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/jinja2/__pycache__/bccache.cpython-39.pyc b/venv/lib/python3.9/site-packages/jinja2/__pycache__/bccache.cpython-39.pyc
new file mode 100644
index 0000000..f03a3a3
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/__pycache__/bccache.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/jinja2/__pycache__/compiler.cpython-39.pyc b/venv/lib/python3.9/site-packages/jinja2/__pycache__/compiler.cpython-39.pyc
new file mode 100644
index 0000000..c9e93bb
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/__pycache__/compiler.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/jinja2/__pycache__/constants.cpython-39.pyc b/venv/lib/python3.9/site-packages/jinja2/__pycache__/constants.cpython-39.pyc
new file mode 100644
index 0000000..c5debcc
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/__pycache__/constants.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/jinja2/__pycache__/debug.cpython-39.pyc b/venv/lib/python3.9/site-packages/jinja2/__pycache__/debug.cpython-39.pyc
new file mode 100644
index 0000000..73e8431
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/__pycache__/debug.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/jinja2/__pycache__/defaults.cpython-39.pyc b/venv/lib/python3.9/site-packages/jinja2/__pycache__/defaults.cpython-39.pyc
new file mode 100644
index 0000000..5f5c386
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/__pycache__/defaults.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/jinja2/__pycache__/environment.cpython-39.pyc b/venv/lib/python3.9/site-packages/jinja2/__pycache__/environment.cpython-39.pyc
new file mode 100644
index 0000000..eb27349
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/__pycache__/environment.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/jinja2/__pycache__/exceptions.cpython-39.pyc b/venv/lib/python3.9/site-packages/jinja2/__pycache__/exceptions.cpython-39.pyc
new file mode 100644
index 0000000..4e4d244
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/__pycache__/exceptions.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/jinja2/__pycache__/ext.cpython-39.pyc b/venv/lib/python3.9/site-packages/jinja2/__pycache__/ext.cpython-39.pyc
new file mode 100644
index 0000000..1d642d6
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/__pycache__/ext.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/jinja2/__pycache__/filters.cpython-39.pyc b/venv/lib/python3.9/site-packages/jinja2/__pycache__/filters.cpython-39.pyc
new file mode 100644
index 0000000..1291ff5
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/__pycache__/filters.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/jinja2/__pycache__/idtracking.cpython-39.pyc b/venv/lib/python3.9/site-packages/jinja2/__pycache__/idtracking.cpython-39.pyc
new file mode 100644
index 0000000..f9a8ec0
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/__pycache__/idtracking.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/jinja2/__pycache__/lexer.cpython-39.pyc b/venv/lib/python3.9/site-packages/jinja2/__pycache__/lexer.cpython-39.pyc
new file mode 100644
index 0000000..c192dfd
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/__pycache__/lexer.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/jinja2/__pycache__/loaders.cpython-39.pyc b/venv/lib/python3.9/site-packages/jinja2/__pycache__/loaders.cpython-39.pyc
new file mode 100644
index 0000000..3ab690a
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/__pycache__/loaders.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/jinja2/__pycache__/meta.cpython-39.pyc b/venv/lib/python3.9/site-packages/jinja2/__pycache__/meta.cpython-39.pyc
new file mode 100644
index 0000000..28304af
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/__pycache__/meta.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/jinja2/__pycache__/nativetypes.cpython-39.pyc b/venv/lib/python3.9/site-packages/jinja2/__pycache__/nativetypes.cpython-39.pyc
new file mode 100644
index 0000000..441de0f
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/__pycache__/nativetypes.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/jinja2/__pycache__/nodes.cpython-39.pyc b/venv/lib/python3.9/site-packages/jinja2/__pycache__/nodes.cpython-39.pyc
new file mode 100644
index 0000000..5cde8d1
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/__pycache__/nodes.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/jinja2/__pycache__/optimizer.cpython-39.pyc b/venv/lib/python3.9/site-packages/jinja2/__pycache__/optimizer.cpython-39.pyc
new file mode 100644
index 0000000..66e8724
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/__pycache__/optimizer.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/jinja2/__pycache__/parser.cpython-39.pyc b/venv/lib/python3.9/site-packages/jinja2/__pycache__/parser.cpython-39.pyc
new file mode 100644
index 0000000..2190e96
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/__pycache__/parser.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/jinja2/__pycache__/runtime.cpython-39.pyc b/venv/lib/python3.9/site-packages/jinja2/__pycache__/runtime.cpython-39.pyc
new file mode 100644
index 0000000..7a8837c
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/__pycache__/runtime.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/jinja2/__pycache__/sandbox.cpython-39.pyc b/venv/lib/python3.9/site-packages/jinja2/__pycache__/sandbox.cpython-39.pyc
new file mode 100644
index 0000000..703ae07
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/__pycache__/sandbox.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/jinja2/__pycache__/tests.cpython-39.pyc b/venv/lib/python3.9/site-packages/jinja2/__pycache__/tests.cpython-39.pyc
new file mode 100644
index 0000000..d0c811d
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/__pycache__/tests.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/jinja2/__pycache__/utils.cpython-39.pyc b/venv/lib/python3.9/site-packages/jinja2/__pycache__/utils.cpython-39.pyc
new file mode 100644
index 0000000..aba1520
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/__pycache__/utils.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/jinja2/__pycache__/visitor.cpython-39.pyc b/venv/lib/python3.9/site-packages/jinja2/__pycache__/visitor.cpython-39.pyc
new file mode 100644
index 0000000..90e42bc
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/__pycache__/visitor.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/jinja2/_identifier.py b/venv/lib/python3.9/site-packages/jinja2/_identifier.py
new file mode 100644
index 0000000..928c150
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/_identifier.py
@@ -0,0 +1,6 @@
+import re
+
+# generated by scripts/generate_identifier_pattern.py
+pattern = re.compile(
+    r"[\w·̀-ͯ·҃-֑҇-ׇֽֿׁׂׅׄؐ-ًؚ-ٰٟۖ-ۜ۟-۪ۤۧۨ-ܑۭܰ-݊ަ-ް߫-߽߳ࠖ-࠙ࠛ-ࠣࠥ-ࠧࠩ-࡙࠭-࡛࣓-ࣣ࣡-ःऺ-़ा-ॏ॑-ॗॢॣঁ-ঃ়া-ৄেৈো-্ৗৢৣ৾ਁ-ਃ਼ਾ-ੂੇੈੋ-੍ੑੰੱੵઁ-ઃ઼ા-ૅે-ૉો-્ૢૣૺ-૿ଁ-ଃ଼ା-ୄେୈୋ-୍ୖୗୢୣஂா-ூெ-ைொ-்ௗఀ-ఄా-ౄె-ైొ-్ౕౖౢౣಁ-ಃ಼ಾ-ೄೆ-ೈೊ-್ೕೖೢೣഀ-ഃ഻഼ാ-ൄെ-ൈൊ-്ൗൢൣංඃ්ා-ුූෘ-ෟෲෳัิ-ฺ็-๎ັິ-ູົຼ່-ໍ༹༘༙༵༷༾༿ཱ-྄྆྇ྍ-ྗྙ-ྼ࿆ါ-ှၖ-ၙၞ-ၠၢ-ၤၧ-ၭၱ-ၴႂ-ႍႏႚ-ႝ፝-፟ᜒ-᜔ᜲ-᜴ᝒᝓᝲᝳ឴-៓៝᠋-᠍ᢅᢆᢩᤠ-ᤫᤰ-᤻ᨗ-ᨛᩕ-ᩞ᩠-᩿᩼᪰-᪽ᬀ-ᬄ᬴-᭄᭫-᭳ᮀ-ᮂᮡ-ᮭ᯦-᯳ᰤ-᰷᳐-᳔᳒-᳨᳭ᳲ-᳴᳷-᳹᷀-᷹᷻-᷿‿⁀⁔⃐-⃥⃜⃡-⃰℘℮⳯-⵿⳱ⷠ-〪ⷿ-゙゚〯꙯ꙴ-꙽ꚞꚟ꛰꛱ꠂ꠆ꠋꠣ-ꠧꢀꢁꢴ-ꣅ꣠-꣱ꣿꤦ-꤭ꥇ-꥓ꦀ-ꦃ꦳-꧀ꧥꨩ-ꨶꩃꩌꩍꩻ-ꩽꪰꪲ-ꪴꪷꪸꪾ꪿꫁ꫫ-ꫯꫵ꫶ꯣ-ꯪ꯬꯭ﬞ︀-️︠-︯︳︴﹍-﹏_𐇽𐋠𐍶-𐍺𐨁-𐨃𐨅𐨆𐨌-𐨏𐨸-𐨿𐨺𐫦𐫥𐴤-𐽆𐴧-𐽐𑀀-𑀂𑀸-𑁆𑁿-𑂂𑂰-𑂺𑄀-𑄂𑄧-𑄴𑅅𑅆𑅳𑆀-𑆂𑆳-𑇀𑇉-𑇌𑈬-𑈷𑈾𑋟-𑋪𑌀-𑌃𑌻𑌼𑌾-𑍄𑍇𑍈𑍋-𑍍𑍗𑍢𑍣𑍦-𑍬𑍰-𑍴𑐵-𑑆𑑞𑒰-𑓃𑖯-𑖵𑖸-𑗀𑗜𑗝𑘰-𑙀𑚫-𑚷𑜝-𑜫𑠬-𑠺𑨁-𑨊𑨳-𑨹𑨻-𑨾𑩇𑩑-𑩛𑪊-𑪙𑰯-𑰶𑰸-𑰿𑲒-𑲧𑲩-𑲶𑴱-𑴶𑴺𑴼𑴽𑴿-𑵅𑵇𑶊-𑶎𑶐𑶑𑶓-𑶗𑻳-𑻶𖫰-𖫴𖬰-𖬶𖽑-𖽾𖾏-𖾒𛲝𛲞𝅥-𝅩𝅭-𝅲𝅻-𝆂𝆅-𝆋𝆪-𝆭𝉂-𝉄𝨀-𝨶𝨻-𝩬𝩵𝪄𝪛-𝪟𝪡-𝪯𞀀-𞀆𞀈-𞀘𞀛-𞀡𞀣𞀤𞀦-𞣐𞀪-𞣖𞥄-𞥊󠄀-󠇯]+"  # noqa: B950
+)
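+
+# Usage sketch (illustrative, not part of the generated file): the pattern
+# matches runs of identifier characters, including the Unicode combining
+# marks Python allows inside names:
+#
+#     assert pattern.fullmatch("café_1")
+#     assert not pattern.fullmatch("not-an-identifier")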
diff --git a/venv/lib/python3.9/site-packages/jinja2/async_utils.py b/venv/lib/python3.9/site-packages/jinja2/async_utils.py
new file mode 100644
index 0000000..1a4f389
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/async_utils.py
@@ -0,0 +1,84 @@
+import inspect
+import typing as t
+from functools import WRAPPER_ASSIGNMENTS
+from functools import wraps
+
+from .utils import _PassArg
+from .utils import pass_eval_context
+
+V = t.TypeVar("V")
+
+
+def async_variant(normal_func):  # type: ignore
+    def decorator(async_func):  # type: ignore
+        pass_arg = _PassArg.from_obj(normal_func)
+        need_eval_context = pass_arg is None
+
+        if pass_arg is _PassArg.environment:
+
+            def is_async(args: t.Any) -> bool:
+                return t.cast(bool, args[0].is_async)
+
+        else:
+
+            def is_async(args: t.Any) -> bool:
+                return t.cast(bool, args[0].environment.is_async)
+
+        # Take the doc and annotations from the sync function, but the
+        # name from the async function. Pallets-Sphinx-Themes
+        # build_function_directive expects __wrapped__ to point to the
+        # sync function.
+        async_func_attrs = ("__module__", "__name__", "__qualname__")
+        normal_func_attrs = tuple(set(WRAPPER_ASSIGNMENTS).difference(async_func_attrs))
+
+        @wraps(normal_func, assigned=normal_func_attrs)
+        @wraps(async_func, assigned=async_func_attrs, updated=())
+        def wrapper(*args, **kwargs):  # type: ignore
+            b = is_async(args)
+
+            if need_eval_context:
+                args = args[1:]
+
+            if b:
+                return async_func(*args, **kwargs)
+
+            return normal_func(*args, **kwargs)
+
+        if need_eval_context:
+            wrapper = pass_eval_context(wrapper)
+
+        wrapper.jinja_async_variant = True
+        return wrapper
+
+    return decorator
+
+
+_common_primitives = {int, float, bool, str, list, dict, tuple, type(None)}
+
+
+async def auto_await(value: t.Union[t.Awaitable["V"], "V"]) -> "V":
+    # Avoid a costly call to isawaitable
+    if type(value) in _common_primitives:
+        return t.cast("V", value)
+
+    if inspect.isawaitable(value):
+        return await t.cast("t.Awaitable[V]", value)
+
+    return t.cast("V", value)
+
+
+async def auto_aiter(
+    iterable: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
+) -> "t.AsyncIterator[V]":
+    if hasattr(iterable, "__aiter__"):
+        async for item in t.cast("t.AsyncIterable[V]", iterable):
+            yield item
+    else:
+        for item in t.cast("t.Iterable[V]", iterable):
+            yield item
+
+
+async def auto_to_list(
+    value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
+) -> t.List["V"]:
+    return [x async for x in auto_aiter(value)]
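+
+
+# Usage sketch: these helpers let one code path serve both sync and async
+# rendering; both calls below run under plain asyncio:
+#
+#     import asyncio
+#
+#     assert asyncio.run(auto_await(42)) == 42
+#     assert asyncio.run(auto_to_list([1, 2, 3])) == [1, 2, 3]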
diff --git a/venv/lib/python3.9/site-packages/jinja2/bccache.py b/venv/lib/python3.9/site-packages/jinja2/bccache.py
new file mode 100644
index 0000000..d0ddf56
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/bccache.py
@@ -0,0 +1,406 @@
+"""The optional bytecode cache system. This is useful if you have very
+complex template situations and the compilation of all those templates
+slows down your application too much.
+
+This is often useful in forking web applications that are initialized
+on the first request.
+"""
+import errno
+import fnmatch
+import marshal
+import os
+import pickle
+import stat
+import sys
+import tempfile
+import typing as t
+from hashlib import sha1
+from io import BytesIO
+from types import CodeType
+
+if t.TYPE_CHECKING:
+    import typing_extensions as te
+    from .environment import Environment
+
+    class _MemcachedClient(te.Protocol):
+        def get(self, key: str) -> bytes:
+            ...
+
+        def set(self, key: str, value: bytes, timeout: t.Optional[int] = None) -> None:
+            ...
+
+
+bc_version = 5
+# Magic bytes to identify Jinja bytecode cache files. Contains the
+# Python major and minor version to avoid loading incompatible bytecode
+# if a project upgrades its Python version.
+bc_magic = (
+    b"j2"
+    + pickle.dumps(bc_version, 2)
+    + pickle.dumps((sys.version_info[0] << 24) | sys.version_info[1], 2)
+)
+
+
+class Bucket:
+    """Buckets are used to store the bytecode for one template.  It's created
+    and initialized by the bytecode cache and passed to the loading functions.
+
+    The buckets get an internal checksum from the cache assigned and use this
+    to automatically reject outdated cache material.  Individual bytecode
+    cache subclasses don't have to care about cache invalidation.
+    """
+
+    def __init__(self, environment: "Environment", key: str, checksum: str) -> None:
+        self.environment = environment
+        self.key = key
+        self.checksum = checksum
+        self.reset()
+
+    def reset(self) -> None:
+        """Resets the bucket (unloads the bytecode)."""
+        self.code: t.Optional[CodeType] = None
+
+    def load_bytecode(self, f: t.BinaryIO) -> None:
+        """Loads bytecode from a file or file like object."""
+        # make sure the magic header is correct
+        magic = f.read(len(bc_magic))
+        if magic != bc_magic:
+            self.reset()
+            return
+        # the source code of the file changed, we need to reload
+        checksum = pickle.load(f)
+        if self.checksum != checksum:
+            self.reset()
+            return
+        # if marshal_load fails then we need to reload
+        try:
+            self.code = marshal.load(f)
+        except (EOFError, ValueError, TypeError):
+            self.reset()
+            return
+
+    def write_bytecode(self, f: t.IO[bytes]) -> None:
+        """Dump the bytecode into the file or file like object passed."""
+        if self.code is None:
+            raise TypeError("can't write empty bucket")
+        f.write(bc_magic)
+        pickle.dump(self.checksum, f, 2)
+        marshal.dump(self.code, f)
+
+    def bytecode_from_string(self, string: bytes) -> None:
+        """Load bytecode from bytes."""
+        self.load_bytecode(BytesIO(string))
+
+    def bytecode_to_string(self) -> bytes:
+        """Return the bytecode as bytes."""
+        out = BytesIO()
+        self.write_bytecode(out)
+        return out.getvalue()
+
+
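+# Round-trip sketch (assumes an Environment `env` and a code object
+# `code`; the checksum strings must match for the load to succeed):
+#
+#     bucket = Bucket(env, "key", "checksum")
+#     bucket.code = code
+#     blob = bucket.bytecode_to_string()
+#     restored = Bucket(env, "key", "checksum")
+#     restored.bytecode_from_string(blob)
+#     assert restored.code is not None
+
+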
+class BytecodeCache:
+    """To implement your own bytecode cache you have to subclass this class
+    and override :meth:`load_bytecode` and :meth:`dump_bytecode`.  Both of
+    these methods are passed a :class:`~jinja2.bccache.Bucket`.
+
+    A very basic bytecode cache that saves the bytecode on the file system::
+
+        from os import path
+
+        class MyCache(BytecodeCache):
+
+            def __init__(self, directory):
+                self.directory = directory
+
+            def load_bytecode(self, bucket):
+                filename = path.join(self.directory, bucket.key)
+                if path.exists(filename):
+                    with open(filename, 'rb') as f:
+                        bucket.load_bytecode(f)
+
+            def dump_bytecode(self, bucket):
+                filename = path.join(self.directory, bucket.key)
+                with open(filename, 'wb') as f:
+                    bucket.write_bytecode(f)
+
+    A more advanced version of a filesystem based bytecode cache is part of
+    Jinja.
+    """
+
+    def load_bytecode(self, bucket: Bucket) -> None:
+        """Subclasses have to override this method to load bytecode into a
+        bucket.  If no code can be found in the cache for the bucket, the
+        method must not do anything.
+        """
+        raise NotImplementedError()
+
+    def dump_bytecode(self, bucket: Bucket) -> None:
+        """Subclasses have to override this method to write the bytecode
+        from a bucket back to the cache.  If it is unable to do so, it must
+        not fail silently but raise an exception.
+        """
+        raise NotImplementedError()
+
+    def clear(self) -> None:
+        """Clears the cache.  This method is not used by Jinja but should be
+        implemented to allow applications to clear the bytecode cache used
+        by a particular environment.
+        """
+
+    def get_cache_key(
+        self, name: str, filename: t.Optional[str] = None
+    ) -> str:
+        """Returns the unique hash key for this template name."""
+        hash = sha1(name.encode("utf-8"))
+
+        if filename is not None:
+            hash.update(f"|{filename}".encode())
+
+        return hash.hexdigest()
+
+    def get_source_checksum(self, source: str) -> str:
+        """Returns a checksum for the source."""
+        return sha1(source.encode("utf-8")).hexdigest()
+
+    def get_bucket(
+        self,
+        environment: "Environment",
+        name: str,
+        filename: t.Optional[str],
+        source: str,
+    ) -> Bucket:
+        """Return a cache bucket for the given template.  All arguments are
+        mandatory but filename may be `None`.
+        """
+        key = self.get_cache_key(name, filename)
+        checksum = self.get_source_checksum(source)
+        bucket = Bucket(environment, key, checksum)
+        self.load_bytecode(bucket)
+        return bucket
+
+    def set_bucket(self, bucket: Bucket) -> None:
+        """Put the bucket into the cache."""
+        self.dump_bytecode(bucket)
+
+
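+# Wiring sketch: a bytecode cache becomes active once it is passed to the
+# Environment constructor, e.g. with the filesystem cache defined below:
+#
+#     from jinja2 import Environment
+#
+#     env = Environment(bytecode_cache=FileSystemBytecodeCache())
+
+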
+class FileSystemBytecodeCache(BytecodeCache):
+    """A bytecode cache that stores bytecode on the filesystem.  It accepts
+    two arguments: The directory where the cache items are stored and a
+    pattern string that is used to build the filename.
+
+    If no directory is specified, a default cache directory is selected.  On
+    Windows the user's temp directory is used; on UNIX systems a directory
+    is created for the user in the system temp directory.
+
+    The pattern can be used to have multiple separate caches operate on the
+    same directory.  The default pattern is ``'__jinja2_%s.cache'``.  ``%s``
+    is replaced with the cache key.
+
+    >>> bcc = FileSystemBytecodeCache('/tmp/jinja_cache', '%s.cache')
+
+    This bytecode cache supports clearing of the cache using the clear method.
+    """
+
+    def __init__(
+        self, directory: t.Optional[str] = None, pattern: str = "__jinja2_%s.cache"
+    ) -> None:
+        if directory is None:
+            directory = self._get_default_cache_dir()
+        self.directory = directory
+        self.pattern = pattern
+
+    def _get_default_cache_dir(self) -> str:
+        def _unsafe_dir() -> "te.NoReturn":
+            raise RuntimeError(
+                "Cannot determine safe temp directory.  You "
+                "need to explicitly provide one."
+            )
+
+        tmpdir = tempfile.gettempdir()
+
+        # On Windows the temporary directory is per-user unless explicitly
+        # forced otherwise, so we can just use it.
+        if os.name == "nt":
+            return tmpdir
+        if not hasattr(os, "getuid"):
+            _unsafe_dir()
+
+        dirname = f"_jinja2-cache-{os.getuid()}"
+        actual_dir = os.path.join(tmpdir, dirname)
+
+        try:
+            os.mkdir(actual_dir, stat.S_IRWXU)
+        except OSError as e:
+            if e.errno != errno.EEXIST:
+                raise
+        try:
+            os.chmod(actual_dir, stat.S_IRWXU)
+            actual_dir_stat = os.lstat(actual_dir)
+            if (
+                actual_dir_stat.st_uid != os.getuid()
+                or not stat.S_ISDIR(actual_dir_stat.st_mode)
+                or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU
+            ):
+                _unsafe_dir()
+        except OSError as e:
+            if e.errno != errno.EEXIST:
+                raise
+
+        actual_dir_stat = os.lstat(actual_dir)
+        if (
+            actual_dir_stat.st_uid != os.getuid()
+            or not stat.S_ISDIR(actual_dir_stat.st_mode)
+            or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU
+        ):
+            _unsafe_dir()
+
+        return actual_dir
+
+    def _get_cache_filename(self, bucket: Bucket) -> str:
+        return os.path.join(self.directory, self.pattern % (bucket.key,))
+
+    def load_bytecode(self, bucket: Bucket) -> None:
+        filename = self._get_cache_filename(bucket)
+
+        # Don't test for existence before opening the file, since the
+        # file could disappear after the test before the open.
+        try:
+            f = open(filename, "rb")
+        except (FileNotFoundError, IsADirectoryError, PermissionError):
+            # PermissionError can occur on Windows when an operation is
+            # in progress, such as calling clear().
+            return
+
+        with f:
+            bucket.load_bytecode(f)
+
+    def dump_bytecode(self, bucket: Bucket) -> None:
+        # Write to a temporary file, then rename to the real name after
+        # writing. This avoids another process reading the file before
+        # it is fully written.
+        name = self._get_cache_filename(bucket)
+        f = tempfile.NamedTemporaryFile(
+            mode="wb",
+            dir=os.path.dirname(name),
+            prefix=os.path.basename(name),
+            suffix=".tmp",
+            delete=False,
+        )
+
+        def remove_silent() -> None:
+            try:
+                os.remove(f.name)
+            except OSError:
+                # Another process may have called clear(). On Windows,
+                # another program may be holding the file open.
+                pass
+
+        try:
+            with f:
+                bucket.write_bytecode(f)
+        except BaseException:
+            remove_silent()
+            raise
+
+        try:
+            os.replace(f.name, name)
+        except OSError:
+            # Another process may have called clear(). On Windows,
+            # another program may be holding the file open.
+            remove_silent()
+        except BaseException:
+            remove_silent()
+            raise
+
+    def clear(self) -> None:
+        # Imported lazily here because Google App Engine doesn't support
+        # write access on the file system and the function does not exist
+        # normally.
+        from os import remove
+
+        files = fnmatch.filter(os.listdir(self.directory), self.pattern % ("*",))
+        for filename in files:
+            try:
+                remove(os.path.join(self.directory, filename))
+            except OSError:
+                pass
+
+
+class MemcachedBytecodeCache(BytecodeCache):
+    """This class implements a bytecode cache that uses a memcache cache for
+    storing the information.  It does not enforce a specific memcache library
+    (tummy's memcache or cmemcache) but will accept any class that provides
+    the minimal interface required.
+
+    Libraries compatible with this class:
+
+    -   `cachelib <https://github.com/pallets/cachelib>`_
+    -   `python-memcached <https://pypi.org/project/python-memcached/>`_
+
+    (Unfortunately the django cache interface is not compatible because it
+    does not support storing binary data, only text. You can however pass
+    the underlying cache client to the bytecode cache which is available
+    as `django.core.cache.cache._client`.)
+
+    The minimal interface for the client passed to the constructor is this:
+
+    .. class:: MinimalClientInterface
+
+        .. method:: set(key, value[, timeout])
+
+            Stores the bytecode in the cache.  `value` is a bytes object
+            and `timeout` is the timeout of the key.  If `timeout` is not
+            provided, a default timeout or no timeout should be assumed;
+            if it is provided, it is an integer giving the number of
+            seconds the cache item should exist.
+
+        .. method:: get(key)
+
+            Returns the value for the cache key.  If the item does not
+            exist in the cache the return value must be `None`.
+
+    The other arguments to the constructor are the prefix for all keys that
+    is added before the actual cache key and the timeout for the bytecode in
+    the cache system.  We recommend a high (or no) timeout.
+
+    This bytecode cache does not support clearing of used items in the cache.
+    The clear method is a no-operation function.
+
+    .. versionadded:: 2.7
+       Added support for ignoring memcache errors through the
+       `ignore_memcache_errors` parameter.
+    """
+
+    def __init__(
+        self,
+        client: "_MemcachedClient",
+        prefix: str = "jinja2/bytecode/",
+        timeout: t.Optional[int] = None,
+        ignore_memcache_errors: bool = True,
+    ):
+        self.client = client
+        self.prefix = prefix
+        self.timeout = timeout
+        self.ignore_memcache_errors = ignore_memcache_errors
+
+    def load_bytecode(self, bucket: Bucket) -> None:
+        try:
+            code = self.client.get(self.prefix + bucket.key)
+        except Exception:
+            if not self.ignore_memcache_errors:
+                raise
+        else:
+            bucket.bytecode_from_string(code)
+
+    def dump_bytecode(self, bucket: Bucket) -> None:
+        key = self.prefix + bucket.key
+        value = bucket.bytecode_to_string()
+
+        try:
+            if self.timeout is not None:
+                self.client.set(key, value, self.timeout)
+            else:
+                self.client.set(key, value)
+        except Exception:
+            if not self.ignore_memcache_errors:
+                raise
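+
+
+# A minimal in-memory stand-in for the client (illustrative only; any
+# object with compatible get()/set() methods satisfies the interface):
+#
+#     class DictClient:
+#         def __init__(self):
+#             self._data = {}
+#
+#         def get(self, key):
+#             return self._data.get(key)
+#
+#         def set(self, key, value, timeout=None):
+#             self._data[key] = value
+#
+#     cache = MemcachedBytecodeCache(DictClient(), prefix="demo/")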
diff --git a/venv/lib/python3.9/site-packages/jinja2/compiler.py b/venv/lib/python3.9/site-packages/jinja2/compiler.py
new file mode 100644
index 0000000..3458095
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/compiler.py
@@ -0,0 +1,1957 @@
+"""Compiles nodes from the parser into Python code."""
+import typing as t
+from contextlib import contextmanager
+from functools import update_wrapper
+from io import StringIO
+from itertools import chain
+from keyword import iskeyword as is_python_keyword
+
+from markupsafe import escape
+from markupsafe import Markup
+
+from . import nodes
+from .exceptions import TemplateAssertionError
+from .idtracking import Symbols
+from .idtracking import VAR_LOAD_ALIAS
+from .idtracking import VAR_LOAD_PARAMETER
+from .idtracking import VAR_LOAD_RESOLVE
+from .idtracking import VAR_LOAD_UNDEFINED
+from .nodes import EvalContext
+from .optimizer import Optimizer
+from .utils import _PassArg
+from .utils import concat
+from .visitor import NodeVisitor
+
+if t.TYPE_CHECKING:
+    import typing_extensions as te
+    from .environment import Environment
+
+F = t.TypeVar("F", bound=t.Callable[..., t.Any])
+
+operators = {
+    "eq": "==",
+    "ne": "!=",
+    "gt": ">",
+    "gteq": ">=",
+    "lt": "<",
+    "lteq": "<=",
+    "in": "in",
+    "notin": "not in",
+}
+
+
+def optimizeconst(f: F) -> F:
+    def new_func(
+        self: "CodeGenerator", node: nodes.Expr, frame: "Frame", **kwargs: t.Any
+    ) -> t.Any:
+        # Only optimize if the frame is not volatile
+        if self.optimizer is not None and not frame.eval_ctx.volatile:
+            new_node = self.optimizer.visit(node, frame.eval_ctx)
+
+            if new_node != node:
+                return self.visit(new_node, frame)
+
+        return f(self, node, frame, **kwargs)
+
+    return update_wrapper(t.cast(F, new_func), f)
+
+
+def _make_binop(op: str) -> t.Callable[["CodeGenerator", nodes.BinExpr, "Frame"], None]:
+    @optimizeconst
+    def visitor(self: "CodeGenerator", node: nodes.BinExpr, frame: Frame) -> None:
+        if (
+            self.environment.sandboxed
+            and op in self.environment.intercepted_binops  # type: ignore
+        ):
+            self.write(f"environment.call_binop(context, {op!r}, ")
+            self.visit(node.left, frame)
+            self.write(", ")
+            self.visit(node.right, frame)
+        else:
+            self.write("(")
+            self.visit(node.left, frame)
+            self.write(f" {op} ")
+            self.visit(node.right, frame)
+
+        self.write(")")
+
+    return visitor
+
+
+def _make_unop(
+    op: str,
+) -> t.Callable[["CodeGenerator", nodes.UnaryExpr, "Frame"], None]:
+    @optimizeconst
+    def visitor(self: "CodeGenerator", node: nodes.UnaryExpr, frame: Frame) -> None:
+        if (
+            self.environment.sandboxed
+            and op in self.environment.intercepted_unops  # type: ignore
+        ):
+            self.write(f"environment.call_unop(context, {op!r}, ")
+            self.visit(node.node, frame)
+        else:
+            self.write("(" + op)
+            self.visit(node.node, frame)
+
+        self.write(")")
+
+    return visitor
+
+
+def generate(
+    node: nodes.Template,
+    environment: "Environment",
+    name: t.Optional[str],
+    filename: t.Optional[str],
+    stream: t.Optional[t.TextIO] = None,
+    defer_init: bool = False,
+    optimized: bool = True,
+) -> t.Optional[str]:
+    """Generate the python source for a node tree."""
+    if not isinstance(node, nodes.Template):
+        raise TypeError("Can't compile non template nodes")
+
+    generator = environment.code_generator_class(
+        environment, name, filename, stream, defer_init, optimized
+    )
+    generator.visit(node)
+
+    if stream is None:
+        return generator.stream.getvalue()  # type: ignore
+
+    return None
+
+
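+# Driving this entry point directly (normally Environment.compile calls
+# it for you; this sketch assumes a default Environment):
+#
+#     from jinja2 import Environment
+#
+#     env = Environment()
+#     src = generate(env.parse("Hello {{ name }}!"), env, "hello", None)
+#     print(src)  # generated Python source for the template module
+
+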
+def has_safe_repr(value: t.Any) -> bool:
+    """Does the node have a safe representation?"""
+    if value is None or value is NotImplemented or value is Ellipsis:
+        return True
+
+    if type(value) in {bool, int, float, complex, range, str, Markup}:
+        return True
+
+    if type(value) in {tuple, list, set, frozenset}:
+        return all(has_safe_repr(v) for v in value)
+
+    if type(value) is dict:
+        return all(has_safe_repr(k) and has_safe_repr(v) for k, v in value.items())
+
+    return False
+
+
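+# For example, containers of literals are safe, while arbitrary objects,
+# whose repr may execute custom code, are not:
+#
+#     assert has_safe_repr((1, "a", None))
+#     assert not has_safe_repr(object())
+
+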
+def find_undeclared(
+    nodes: t.Iterable[nodes.Node], names: t.Iterable[str]
+) -> t.Set[str]:
+    """Check if the names passed are accessed undeclared.  The return value
+    is a set of all the undeclared names from the sequence of names found.
+    """
+    visitor = UndeclaredNameVisitor(names)
+    try:
+        for node in nodes:
+            visitor.visit(node)
+    except VisitorExit:
+        pass
+    return visitor.undeclared
+
+
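+# For example (assumes a default Environment for parsing):
+#
+#     from jinja2 import Environment
+#
+#     body = Environment().parse("{{ caller() }}").body
+#     assert find_undeclared(body, ("caller",)) == {"caller"}
+
+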
+class MacroRef:
+    def __init__(self, node: t.Union[nodes.Macro, nodes.CallBlock]) -> None:
+        self.node = node
+        self.accesses_caller = False
+        self.accesses_kwargs = False
+        self.accesses_varargs = False
+
+
+class Frame:
+    """Holds compile time information for us."""
+
+    def __init__(
+        self,
+        eval_ctx: EvalContext,
+        parent: t.Optional["Frame"] = None,
+        level: t.Optional[int] = None,
+    ) -> None:
+        self.eval_ctx = eval_ctx
+
+        # the parent of this frame
+        self.parent = parent
+
+        if parent is None:
+            self.symbols = Symbols(level=level)
+
+            # in some dynamic inheritance situations the compiler needs to add
+            # write tests around output statements.
+            self.require_output_check = False
+
+            # inside some tags we are using a buffer rather than yield statements.
+            # this for example affects {% filter %} or {% macro %}.  If a frame
+            # is buffered this variable points to the name of the list used as
+            # buffer.
+            self.buffer: t.Optional[str] = None
+
+            # the name of the block we're in, otherwise None.
+            self.block: t.Optional[str] = None
+
+        else:
+            self.symbols = Symbols(parent.symbols, level=level)
+            self.require_output_check = parent.require_output_check
+            self.buffer = parent.buffer
+            self.block = parent.block
+
+        # a toplevel frame is the root + soft frames such as if conditions.
+        self.toplevel = False
+
+        # the root frame is basically just the outermost frame, so no if
+        # conditions.  This information is used to optimize inheritance
+        # situations.
+        self.rootlevel = False
+
+        # variables set inside of loops and blocks should not affect outer frames,
+        # but they still need to be tracked as part of the active context.
+        self.loop_frame = False
+        self.block_frame = False
+
+        # track whether the frame is being used in an if-statement or conditional
+        # expression as it determines which errors should be raised during runtime
+        # or compile time.
+        self.soft_frame = False
+
+    def copy(self) -> "Frame":
+        """Create a copy of the current one."""
+        rv = object.__new__(self.__class__)
+        rv.__dict__.update(self.__dict__)
+        rv.symbols = self.symbols.copy()
+        return rv
+
+    def inner(self, isolated: bool = False) -> "Frame":
+        """Return an inner frame."""
+        if isolated:
+            return Frame(self.eval_ctx, level=self.symbols.level + 1)
+        return Frame(self.eval_ctx, self)
+
+    def soft(self) -> "Frame":
+        """Return a soft frame.  A soft frame may not be modified as
+        standalone thing as it shares the resources with the frame it
+        was created of, but it's not a rootlevel frame any longer.
+
+        This is only used to implement if-statements and conditional
+        expressions.
+        """
+        rv = self.copy()
+        rv.rootlevel = False
+        rv.soft_frame = True
+        return rv
+
+    __copy__ = copy
+
+
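+# Scope sketch (assumes a default Environment; frames mirror template
+# scopes during code generation):
+#
+#     from jinja2 import Environment
+#
+#     root = Frame(EvalContext(Environment(), "tmpl"))
+#     loop_body = root.inner()   # child scope with its own symbol table
+#     branch = root.soft()       # if-branch; shares symbols with root
+
+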
+class VisitorExit(RuntimeError):
+    """Exception used by the `UndeclaredNameVisitor` to signal a stop."""
+
+
+class DependencyFinderVisitor(NodeVisitor):
+    """A visitor that collects filter and test calls."""
+
+    def __init__(self) -> None:
+        self.filters: t.Set[str] = set()
+        self.tests: t.Set[str] = set()
+
+    def visit_Filter(self, node: nodes.Filter) -> None:
+        self.generic_visit(node)
+        self.filters.add(node.name)
+
+    def visit_Test(self, node: nodes.Test) -> None:
+        self.generic_visit(node)
+        self.tests.add(node.name)
+
+    def visit_Block(self, node: nodes.Block) -> None:
+        """Stop visiting at blocks."""
+
+
+class UndeclaredNameVisitor(NodeVisitor):
+    """A visitor that checks if a name is accessed without being
+    declared.  This is different from the frame visitor as it will
+    not stop at closure frames.
+    """
+
+    def __init__(self, names: t.Iterable[str]) -> None:
+        self.names = set(names)
+        self.undeclared: t.Set[str] = set()
+
+    def visit_Name(self, node: nodes.Name) -> None:
+        if node.ctx == "load" and node.name in self.names:
+            self.undeclared.add(node.name)
+            if self.undeclared == self.names:
+                raise VisitorExit()
+        else:
+            self.names.discard(node.name)
+
+    def visit_Block(self, node: nodes.Block) -> None:
+        """Stop visiting a blocks."""
+
+
+class CompilerExit(Exception):
+    """Raised if the compiler encountered a situation where it just
+    doesn't make sense to further process the code.  Any block that
+    raises such an exception is not further processed.
+    """
+
+
+class CodeGenerator(NodeVisitor):
+    def __init__(
+        self,
+        environment: "Environment",
+        name: t.Optional[str],
+        filename: t.Optional[str],
+        stream: t.Optional[t.TextIO] = None,
+        defer_init: bool = False,
+        optimized: bool = True,
+    ) -> None:
+        if stream is None:
+            stream = StringIO()
+        self.environment = environment
+        self.name = name
+        self.filename = filename
+        self.stream = stream
+        self.created_block_context = False
+        self.defer_init = defer_init
+        self.optimizer: t.Optional[Optimizer] = None
+
+        if optimized:
+            self.optimizer = Optimizer(environment)
+
+        # aliases for imports
+        self.import_aliases: t.Dict[str, str] = {}
+
+        # a registry for all blocks.  Because blocks are moved out
+        # into the global python scope they are registered here
+        self.blocks: t.Dict[str, nodes.Block] = {}
+
+        # the number of extends statements so far
+        self.extends_so_far = 0
+
+        # some templates have a rootlevel extends.  In this case we
+        # can safely assume that we're a child template and do some
+        # more optimizations.
+        self.has_known_extends = False
+
+        # the current line number
+        self.code_lineno = 1
+
+        # registry of all filters and tests (global, not block local)
+        self.tests: t.Dict[str, str] = {}
+        self.filters: t.Dict[str, str] = {}
+
+        # the debug information
+        self.debug_info: t.List[t.Tuple[int, int]] = []
+        self._write_debug_info: t.Optional[int] = None
+
+        # the number of new lines before the next write()
+        self._new_lines = 0
+
+        # the line number of the last written statement
+        self._last_line = 0
+
+        # true if nothing was written so far.
+        self._first_write = True
+
+        # used by the `temporary_identifier` method to get a new
+        # unique, temporary identifier
+        self._last_identifier = 0
+
+        # the current indentation
+        self._indentation = 0
+
+        # Tracks toplevel assignments
+        self._assign_stack: t.List[t.Set[str]] = []
+
+        # Tracks parameter definition blocks
+        self._param_def_block: t.List[t.Set[str]] = []
+
+        # Tracks the current context.
+        self._context_reference_stack = ["context"]
+
+    @property
+    def optimized(self) -> bool:
+        return self.optimizer is not None
+
+    # -- Various compilation helpers
+
+    def fail(self, msg: str, lineno: int) -> "te.NoReturn":
+        """Fail with a :exc:`TemplateAssertionError`."""
+        raise TemplateAssertionError(msg, lineno, self.name, self.filename)
+
+    def temporary_identifier(self) -> str:
+        """Get a new unique identifier."""
+        self._last_identifier += 1
+        return f"t_{self._last_identifier}"
+
+    def buffer(self, frame: Frame) -> None:
+        """Enable buffering for the frame from that point onwards."""
+        frame.buffer = self.temporary_identifier()
+        self.writeline(f"{frame.buffer} = []")
+
+    def return_buffer_contents(
+        self, frame: Frame, force_unescaped: bool = False
+    ) -> None:
+        """Return the buffer contents of the frame."""
+        if not force_unescaped:
+            if frame.eval_ctx.volatile:
+                self.writeline("if context.eval_ctx.autoescape:")
+                self.indent()
+                self.writeline(f"return Markup(concat({frame.buffer}))")
+                self.outdent()
+                self.writeline("else:")
+                self.indent()
+                self.writeline(f"return concat({frame.buffer})")
+                self.outdent()
+                return
+            elif frame.eval_ctx.autoescape:
+                self.writeline(f"return Markup(concat({frame.buffer}))")
+                return
+        self.writeline(f"return concat({frame.buffer})")
+
+    def indent(self) -> None:
+        """Indent by one."""
+        self._indentation += 1
+
+    def outdent(self, step: int = 1) -> None:
+        """Outdent by step."""
+        self._indentation -= step
+
+    def start_write(self, frame: Frame, node: t.Optional[nodes.Node] = None) -> None:
+        """Yield or write into the frame buffer."""
+        if frame.buffer is None:
+            self.writeline("yield ", node)
+        else:
+            self.writeline(f"{frame.buffer}.append(", node)
+
+    def end_write(self, frame: Frame) -> None:
+        """End the writing process started by `start_write`."""
+        if frame.buffer is not None:
+            self.write(")")
+
+    def simple_write(
+        self, s: str, frame: Frame, node: t.Optional[nodes.Node] = None
+    ) -> None:
+        """Simple shortcut for start_write + write + end_write."""
+        self.start_write(frame, node)
+        self.write(s)
+        self.end_write(frame)
+
+    def blockvisit(self, nodes: t.Iterable[nodes.Node], frame: Frame) -> None:
+        """Visit a list of nodes as block in a frame.  If the current frame
+        is no buffer a dummy ``if 0: yield None`` is written automatically.
+        """
+        try:
+            self.writeline("pass")
+            for node in nodes:
+                self.visit(node, frame)
+        except CompilerExit:
+            pass
+
+    def write(self, x: str) -> None:
+        """Write a string into the output stream."""
+        if self._new_lines:
+            if not self._first_write:
+                self.stream.write("\n" * self._new_lines)
+                self.code_lineno += self._new_lines
+                if self._write_debug_info is not None:
+                    self.debug_info.append((self._write_debug_info, self.code_lineno))
+                    self._write_debug_info = None
+            self._first_write = False
+            self.stream.write("    " * self._indentation)
+            self._new_lines = 0
+        self.stream.write(x)
+
+    def writeline(
+        self, x: str, node: t.Optional[nodes.Node] = None, extra: int = 0
+    ) -> None:
+        """Combination of newline and write."""
+        self.newline(node, extra)
+        self.write(x)
+
+    def newline(self, node: t.Optional[nodes.Node] = None, extra: int = 0) -> None:
+        """Add one or more newlines before the next write."""
+        self._new_lines = max(self._new_lines, 1 + extra)
+        if node is not None and node.lineno != self._last_line:
+            self._write_debug_info = node.lineno
+            self._last_line = node.lineno
+
+    def signature(
+        self,
+        node: t.Union[nodes.Call, nodes.Filter, nodes.Test],
+        frame: Frame,
+        extra_kwargs: t.Optional[t.Mapping[str, t.Any]] = None,
+    ) -> None:
+        """Writes a function call to the stream for the current node.
+        A leading comma is added automatically.  The extra keyword
+        arguments may not include Python keywords, otherwise a syntax
+        error could occur.  The extra keyword arguments should be given
+        as a Python dict.
+        """
+        # if any of the given keyword arguments is a python keyword
+        # we have to make sure that no invalid call is created.
+        kwarg_workaround = any(
+            is_python_keyword(t.cast(str, k))
+            for k in chain((x.key for x in node.kwargs), extra_kwargs or ())
+        )
+
+        for arg in node.args:
+            self.write(", ")
+            self.visit(arg, frame)
+
+        if not kwarg_workaround:
+            for kwarg in node.kwargs:
+                self.write(", ")
+                self.visit(kwarg, frame)
+            if extra_kwargs is not None:
+                for key, value in extra_kwargs.items():
+                    self.write(f", {key}={value}")
+        if node.dyn_args:
+            self.write(", *")
+            self.visit(node.dyn_args, frame)
+
+        if kwarg_workaround:
+            if node.dyn_kwargs is not None:
+                self.write(", **dict({")
+            else:
+                self.write(", **{")
+            for kwarg in node.kwargs:
+                self.write(f"{kwarg.key!r}: ")
+                self.visit(kwarg.value, frame)
+                self.write(", ")
+            if extra_kwargs is not None:
+                for key, value in extra_kwargs.items():
+                    self.write(f"{key!r}: {value}, ")
+            if node.dyn_kwargs is not None:
+                self.write("}, **")
+                self.visit(node.dyn_kwargs, frame)
+                self.write(")")
+            else:
+                self.write("}")
+
+        elif node.dyn_kwargs is not None:
+            self.write(", **")
+            self.visit(node.dyn_kwargs, frame)
+
+    def pull_dependencies(self, nodes: t.Iterable[nodes.Node]) -> None:
+        """Find all filter and test names used in the template and
+        assign them to variables in the compiled namespace. Checking
+        that the names are registered with the environment is done when
+        compiling the Filter and Test nodes. If the node is in an If or
+        CondExpr node, the check is done at runtime instead.
+
+        .. versionchanged:: 3.0
+            Filters and tests in If and CondExpr nodes are checked at
+            runtime instead of compile time.
+        """
+        visitor = DependencyFinderVisitor()
+
+        for node in nodes:
+            visitor.visit(node)
+
+        for id_map, names, dependency in (self.filters, visitor.filters, "filters"), (
+            self.tests,
+            visitor.tests,
+            "tests",
+        ):
+            for name in sorted(names):
+                if name not in id_map:
+                    id_map[name] = self.temporary_identifier()
+
+                # add check during runtime that dependencies used inside of executed
+                # blocks are defined, as this step may be skipped during compile time
+                self.writeline("try:")
+                self.indent()
+                self.writeline(f"{id_map[name]} = environment.{dependency}[{name!r}]")
+                self.outdent()
+                self.writeline("except KeyError:")
+                self.indent()
+                self.writeline("@internalcode")
+                self.writeline(f"def {id_map[name]}(*unused):")
+                self.indent()
+                self.writeline(
+                    f'raise TemplateRuntimeError("No {dependency[:-1]}'
+                    f' named {name!r} found.")'
+                )
+                self.outdent()
+                self.outdent()
+
+    def enter_frame(self, frame: Frame) -> None:
+        undefs = []
+        for target, (action, param) in frame.symbols.loads.items():
+            if action == VAR_LOAD_PARAMETER:
+                pass
+            elif action == VAR_LOAD_RESOLVE:
+                self.writeline(f"{target} = {self.get_resolve_func()}({param!r})")
+            elif action == VAR_LOAD_ALIAS:
+                self.writeline(f"{target} = {param}")
+            elif action == VAR_LOAD_UNDEFINED:
+                undefs.append(target)
+            else:
+                raise NotImplementedError("unknown load instruction")
+        if undefs:
+            self.writeline(f"{' = '.join(undefs)} = missing")
+
+    def leave_frame(self, frame: Frame, with_python_scope: bool = False) -> None:
+        if not with_python_scope:
+            undefs = []
+            for target in frame.symbols.loads:
+                undefs.append(target)
+            if undefs:
+                self.writeline(f"{' = '.join(undefs)} = missing")
+
+    def choose_async(self, async_value: str = "async ", sync_value: str = "") -> str:
+        return async_value if self.environment.is_async else sync_value
+
+    def func(self, name: str) -> str:
+        return f"{self.choose_async()}def {name}"
+
+    def macro_body(
+        self, node: t.Union[nodes.Macro, nodes.CallBlock], frame: Frame
+    ) -> t.Tuple[Frame, MacroRef]:
+        """Dump the function def of a macro or call block."""
+        frame = frame.inner()
+        frame.symbols.analyze_node(node)
+        macro_ref = MacroRef(node)
+
+        explicit_caller = None
+        skip_special_params = set()
+        args = []
+
+        for idx, arg in enumerate(node.args):
+            if arg.name == "caller":
+                explicit_caller = idx
+            if arg.name in ("kwargs", "varargs"):
+                skip_special_params.add(arg.name)
+            args.append(frame.symbols.ref(arg.name))
+
+        undeclared = find_undeclared(node.body, ("caller", "kwargs", "varargs"))
+
+        if "caller" in undeclared:
+            # In older Jinja versions there was a bug that allowed caller
+            # to retain the special behavior even if it was mentioned in
+            # the argument list.  However thankfully this was only really
+            # working if it was the last argument.  So we are explicitly
+            # checking this now and erroring out if it is anywhere else in
+            # the argument list.
+            if explicit_caller is not None:
+                try:
+                    node.defaults[explicit_caller - len(node.args)]
+                except IndexError:
+                    self.fail(
+                        "When defining macros or call blocks the "
+                        'special "caller" argument must be omitted '
+                        "or be given a default.",
+                        node.lineno,
+                    )
+            else:
+                args.append(frame.symbols.declare_parameter("caller"))
+            macro_ref.accesses_caller = True
+        if "kwargs" in undeclared and "kwargs" not in skip_special_params:
+            args.append(frame.symbols.declare_parameter("kwargs"))
+            macro_ref.accesses_kwargs = True
+        if "varargs" in undeclared and "varargs" not in skip_special_params:
+            args.append(frame.symbols.declare_parameter("varargs"))
+            macro_ref.accesses_varargs = True
+
+        # macros are delayed, they never require output checks
+        frame.require_output_check = False
+        frame.symbols.analyze_node(node)
+        self.writeline(f"{self.func('macro')}({', '.join(args)}):", node)
+        self.indent()
+
+        self.buffer(frame)
+        self.enter_frame(frame)
+
+        self.push_parameter_definitions(frame)
+        for idx, arg in enumerate(node.args):
+            ref = frame.symbols.ref(arg.name)
+            self.writeline(f"if {ref} is missing:")
+            self.indent()
+            try:
+                default = node.defaults[idx - len(node.args)]
+            except IndexError:
+                self.writeline(
+                    f'{ref} = undefined("parameter {arg.name!r} was not provided",'
+                    f" name={arg.name!r})"
+                )
+            else:
+                self.writeline(f"{ref} = ")
+                self.visit(default, frame)
+            self.mark_parameter_stored(ref)
+            self.outdent()
+        self.pop_parameter_definitions()
+
+        self.blockvisit(node.body, frame)
+        self.return_buffer_contents(frame, force_unescaped=True)
+        self.leave_frame(frame, with_python_scope=True)
+        self.outdent()
+
+        return frame, macro_ref
+
+    def macro_def(self, macro_ref: MacroRef, frame: Frame) -> None:
+        """Dump the macro definition for the def created by macro_body."""
+        arg_tuple = ", ".join(repr(x.name) for x in macro_ref.node.args)
+        name = getattr(macro_ref.node, "name", None)
+        if len(macro_ref.node.args) == 1:
+            arg_tuple += ","
+        self.write(
+            f"Macro(environment, macro, {name!r}, ({arg_tuple}),"
+            f" {macro_ref.accesses_kwargs!r}, {macro_ref.accesses_varargs!r},"
+            f" {macro_ref.accesses_caller!r}, context.eval_ctx.autoescape)"
+        )
+
+    def position(self, node: nodes.Node) -> str:
+        """Return a human readable position for the node."""
+        rv = f"line {node.lineno}"
+        if self.name is not None:
+            rv = f"{rv} in {self.name!r}"
+        return rv
+
+    def dump_local_context(self, frame: Frame) -> str:
+        items_kv = ", ".join(
+            f"{name!r}: {target}"
+            for name, target in frame.symbols.dump_stores().items()
+        )
+        return f"{{{items_kv}}}"
+
+    def write_commons(self) -> None:
+        """Writes a common preamble that is used by root and block functions.
+        Primarily this sets up common local helpers and enforces a generator
+        through a dead branch.
+        """
+        self.writeline("resolve = context.resolve_or_missing")
+        self.writeline("undefined = environment.undefined")
+        self.writeline("concat = environment.concat")
+        # always use the standard Undefined class for the implicit else of
+        # conditional expressions
+        self.writeline("cond_expr_undefined = Undefined")
+        self.writeline("if 0: yield None")
+
+    def push_parameter_definitions(self, frame: Frame) -> None:
+        """Pushes all parameter targets from the given frame into a local
+        stack that permits tracking of yet to be assigned parameters.  In
+        particular this enables the optimization from `visit_Name` to skip
+        undefined expressions for parameters in macros as macros can reference
+        otherwise unbound parameters.
+        """
+        self._param_def_block.append(frame.symbols.dump_param_targets())
+
+    def pop_parameter_definitions(self) -> None:
+        """Pops the current parameter definitions set."""
+        self._param_def_block.pop()
+
+    def mark_parameter_stored(self, target: str) -> None:
+        """Marks a parameter in the current parameter definitions as stored.
+        This will skip the enforced undefined checks.
+        """
+        if self._param_def_block:
+            self._param_def_block[-1].discard(target)
+
+    def push_context_reference(self, target: str) -> None:
+        self._context_reference_stack.append(target)
+
+    def pop_context_reference(self) -> None:
+        self._context_reference_stack.pop()
+
+    def get_context_ref(self) -> str:
+        return self._context_reference_stack[-1]
+
+    def get_resolve_func(self) -> str:
+        target = self._context_reference_stack[-1]
+        if target == "context":
+            return "resolve"
+        return f"{target}.resolve"
+
+    def derive_context(self, frame: Frame) -> str:
+        return f"{self.get_context_ref()}.derived({self.dump_local_context(frame)})"
+
+    def parameter_is_undeclared(self, target: str) -> bool:
+        """Checks if a given target is an undeclared parameter."""
+        if not self._param_def_block:
+            return False
+        return target in self._param_def_block[-1]
+
+    def push_assign_tracking(self) -> None:
+        """Pushes a new layer for assignment tracking."""
+        self._assign_stack.append(set())
+
+    def pop_assign_tracking(self, frame: Frame) -> None:
+        """Pops the topmost level for assignment tracking and updates the
+        context variables if necessary.
+        """
+        vars = self._assign_stack.pop()
+        if (
+            not frame.block_frame
+            and not frame.loop_frame
+            and not frame.toplevel
+            or not vars
+        ):
+            return
+        public_names = [x for x in vars if x[:1] != "_"]
+        if len(vars) == 1:
+            name = next(iter(vars))
+            ref = frame.symbols.ref(name)
+            if frame.loop_frame:
+                self.writeline(f"_loop_vars[{name!r}] = {ref}")
+                return
+            if frame.block_frame:
+                self.writeline(f"_block_vars[{name!r}] = {ref}")
+                return
+            self.writeline(f"context.vars[{name!r}] = {ref}")
+        else:
+            if frame.loop_frame:
+                self.writeline("_loop_vars.update({")
+            elif frame.block_frame:
+                self.writeline("_block_vars.update({")
+            else:
+                self.writeline("context.vars.update({")
+            for idx, name in enumerate(vars):
+                if idx:
+                    self.write(", ")
+                ref = frame.symbols.ref(name)
+                self.write(f"{name!r}: {ref}")
+            self.write("})")
+        if not frame.block_frame and not frame.loop_frame and public_names:
+            if len(public_names) == 1:
+                self.writeline(f"context.exported_vars.add({public_names[0]!r})")
+            else:
+                names_str = ", ".join(map(repr, public_names))
+                self.writeline(f"context.exported_vars.update(({names_str}))")
+
+    # -- Statement Visitors
+
+    def visit_Template(
+        self, node: nodes.Template, frame: t.Optional[Frame] = None
+    ) -> None:
+        assert frame is None, "no root frame allowed"
+        eval_ctx = EvalContext(self.environment, self.name)
+
+        from .runtime import exported, async_exported
+
+        if self.environment.is_async:
+            exported_names = sorted(exported + async_exported)
+        else:
+            exported_names = sorted(exported)
+
+        self.writeline("from jinja2.runtime import " + ", ".join(exported_names))
+
+        # if we want a deferred initialization we cannot move the
+        # environment into a local name
+        envenv = "" if self.defer_init else ", environment=environment"
+
+        # do we have an extends tag at all?  If not, we can save some
+        # overhead by just not processing any inheritance code.
+        have_extends = node.find(nodes.Extends) is not None
+
+        # find all blocks
+        for block in node.find_all(nodes.Block):
+            if block.name in self.blocks:
+                self.fail(f"block {block.name!r} defined twice", block.lineno)
+            self.blocks[block.name] = block
+
+        # find all imports and import them
+        for import_ in node.find_all(nodes.ImportedName):
+            if import_.importname not in self.import_aliases:
+                imp = import_.importname
+                self.import_aliases[imp] = alias = self.temporary_identifier()
+                if "." in imp:
+                    module, obj = imp.rsplit(".", 1)
+                    self.writeline(f"from {module} import {obj} as {alias}")
+                else:
+                    self.writeline(f"import {imp} as {alias}")
+
+        # add the load name
+        self.writeline(f"name = {self.name!r}")
+
+        # generate the root render function.
+        self.writeline(
+            f"{self.func('root')}(context, missing=missing{envenv}):", extra=1
+        )
+        self.indent()
+        self.write_commons()
+
+        # process the root
+        frame = Frame(eval_ctx)
+        if "self" in find_undeclared(node.body, ("self",)):
+            ref = frame.symbols.declare_parameter("self")
+            self.writeline(f"{ref} = TemplateReference(context)")
+        frame.symbols.analyze_node(node)
+        frame.toplevel = frame.rootlevel = True
+        frame.require_output_check = have_extends and not self.has_known_extends
+        if have_extends:
+            self.writeline("parent_template = None")
+        self.enter_frame(frame)
+        self.pull_dependencies(node.body)
+        self.blockvisit(node.body, frame)
+        self.leave_frame(frame, with_python_scope=True)
+        self.outdent()
+
+        # make sure that the parent root is called.
+        if have_extends:
+            if not self.has_known_extends:
+                self.indent()
+                self.writeline("if parent_template is not None:")
+            self.indent()
+            if not self.environment.is_async:
+                self.writeline("yield from parent_template.root_render_func(context)")
+            else:
+                self.writeline(
+                    "async for event in parent_template.root_render_func(context):"
+                )
+                self.indent()
+                self.writeline("yield event")
+                self.outdent()
+            self.outdent(1 + (not self.has_known_extends))
+
+        # at this point we now have the blocks collected and can visit them too.
+        for name, block in self.blocks.items():
+            self.writeline(
+                f"{self.func('block_' + name)}(context, missing=missing{envenv}):",
+                block,
+                1,
+            )
+            self.indent()
+            self.write_commons()
+            # It's important that we do not make this frame a child of the
+            # toplevel template.  This would cause a variety of
+            # interesting issues with identifier tracking.
+            block_frame = Frame(eval_ctx)
+            block_frame.block_frame = True
+            undeclared = find_undeclared(block.body, ("self", "super"))
+            if "self" in undeclared:
+                ref = block_frame.symbols.declare_parameter("self")
+                self.writeline(f"{ref} = TemplateReference(context)")
+            if "super" in undeclared:
+                ref = block_frame.symbols.declare_parameter("super")
+                self.writeline(f"{ref} = context.super({name!r}, block_{name})")
+            block_frame.symbols.analyze_node(block)
+            block_frame.block = name
+            self.writeline("_block_vars = {}")
+            self.enter_frame(block_frame)
+            self.pull_dependencies(block.body)
+            self.blockvisit(block.body, block_frame)
+            self.leave_frame(block_frame, with_python_scope=True)
+            self.outdent()
+
+        blocks_kv_str = ", ".join(f"{x!r}: block_{x}" for x in self.blocks)
+        self.writeline(f"blocks = {{{blocks_kv_str}}}", extra=1)
+        debug_kv_str = "&".join(f"{k}={v}" for k, v in self.debug_info)
+        self.writeline(f"debug_info = {debug_kv_str!r}")
+
+    def visit_Block(self, node: nodes.Block, frame: Frame) -> None:
+        """Call a block and register it for the template."""
+        level = 0
+        if frame.toplevel:
+            # if we know that we are a child template, there is no need to
+            # check if we are one
+            if self.has_known_extends:
+                return
+            if self.extends_so_far > 0:
+                self.writeline("if parent_template is None:")
+                self.indent()
+                level += 1
+
+        if node.scoped:
+            context = self.derive_context(frame)
+        else:
+            context = self.get_context_ref()
+
+        if node.required:
+            self.writeline(f"if len(context.blocks[{node.name!r}]) <= 1:", node)
+            self.indent()
+            self.writeline(
+                f'raise TemplateRuntimeError("Required block {node.name!r} not found")',
+                node,
+            )
+            self.outdent()
+
+        if not self.environment.is_async and frame.buffer is None:
+            self.writeline(
+                f"yield from context.blocks[{node.name!r}][0]({context})", node
+            )
+        else:
+            self.writeline(
+                f"{self.choose_async()}for event in"
+                f" context.blocks[{node.name!r}][0]({context}):",
+                node,
+            )
+            self.indent()
+            self.simple_write("event", frame)
+            self.outdent()
+
+        self.outdent(level)
+
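+    # Illustrative sketch (approximate, not generated verbatim): in the
+    # common synchronous, unbuffered case above, a `{% block title %}`
+    # reference compiles to the single delegating statement
+    #
+    #     yield from context.blocks['title'][0](context)
+    #
+    # Parent implementations are appended behind the child's (see
+    # visit_Extends below), so index 0 is always the most derived block.
+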
+    def visit_Extends(self, node: nodes.Extends, frame: Frame) -> None:
+        """Calls the extender."""
+        if not frame.toplevel:
+            self.fail("cannot use extend from a non top-level scope", node.lineno)
+
+        # if the number of extends statements in general is zero so
+        # far, we don't have to add a check if something extended
+        # the template before this one.
+        if self.extends_so_far > 0:
+
+            # if we have a known extends we just add a template runtime
+            # error into the generated code.  We could catch that at compile
+            # time too, but we prefer not to confuse users by throwing the
+            # same error at different times just "because we can".
+            if not self.has_known_extends:
+                self.writeline("if parent_template is not None:")
+                self.indent()
+            self.writeline('raise TemplateRuntimeError("extended multiple times")')
+
+            # if we have a known extends already we don't need that code here
+            # as we know that the template execution will end here.
+            if self.has_known_extends:
+                raise CompilerExit()
+            else:
+                self.outdent()
+
+        self.writeline("parent_template = environment.get_template(", node)
+        self.visit(node.template, frame)
+        self.write(f", {self.name!r})")
+        self.writeline("for name, parent_block in parent_template.blocks.items():")
+        self.indent()
+        self.writeline("context.blocks.setdefault(name, []).append(parent_block)")
+        self.outdent()
+
+        # if this extends statement was in the root level we can take
+        # advantage of that information and simplify the generated code
+        # in the top level from this point onwards
+        if frame.rootlevel:
+            self.has_known_extends = True
+
+        # and now we have one more
+        self.extends_so_far += 1
+
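+    # Illustrative sketch (template names assumed): for a child template
+    # "child.html" that begins with {% extends "base.html" %}, the code
+    # above emits roughly
+    #
+    #     parent_template = environment.get_template("base.html", "child.html")
+    #     for name, parent_block in parent_template.blocks.items():
+    #         context.blocks.setdefault(name, []).append(parent_block)
+    #
+    # and, because frame.rootlevel held, the root epilogue reduces to an
+    # unconditional `yield from parent_template.root_render_func(context)`.
+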
+    def visit_Include(self, node: nodes.Include, frame: Frame) -> None:
+        """Handles includes."""
+        if node.ignore_missing:
+            self.writeline("try:")
+            self.indent()
+
+        func_name = "get_or_select_template"
+        if isinstance(node.template, nodes.Const):
+            if isinstance(node.template.value, str):
+                func_name = "get_template"
+            elif isinstance(node.template.value, (tuple, list)):
+                func_name = "select_template"
+        elif isinstance(node.template, (nodes.Tuple, nodes.List)):
+            func_name = "select_template"
+
+        self.writeline(f"template = environment.{func_name}(", node)
+        self.visit(node.template, frame)
+        self.write(f", {self.name!r})")
+        if node.ignore_missing:
+            self.outdent()
+            self.writeline("except TemplateNotFound:")
+            self.indent()
+            self.writeline("pass")
+            self.outdent()
+            self.writeline("else:")
+            self.indent()
+
+        skip_event_yield = False
+        if node.with_context:
+            self.writeline(
+                f"{self.choose_async()}for event in template.root_render_func("
+                "template.new_context(context.get_all(), True,"
+                f" {self.dump_local_context(frame)})):"
+            )
+        elif self.environment.is_async:
+            self.writeline(
+                "for event in (await template._get_default_module_async())"
+                "._body_stream:"
+            )
+        else:
+            self.writeline("yield from template._get_default_module()._body_stream")
+            skip_event_yield = True
+
+        if not skip_event_yield:
+            self.indent()
+            self.simple_write("event", frame)
+            self.outdent()
+
+        if node.ignore_missing:
+            self.outdent()
+
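+    # Summary of the func_name dispatch above (illustrative templates):
+    #
+    #     {% include "header.html" %}         -> environment.get_template(...)
+    #     {% include ["a.html", "b.html"] %}  -> environment.select_template(...)
+    #     {% include some_variable %}         -> environment.get_or_select_template(...)
+    #
+    # get_or_select_template accepts either a single name or a list, so it
+    # is the safe fallback when the expression is only known at runtime.
+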
+    def _import_common(
+        self, node: t.Union[nodes.Import, nodes.FromImport], frame: Frame
+    ) -> None:
+        self.write(f"{self.choose_async('await ')}environment.get_template(")
+        self.visit(node.template, frame)
+        self.write(f", {self.name!r}).")
+
+        if node.with_context:
+            f_name = f"make_module{self.choose_async('_async')}"
+            self.write(
+                f"{f_name}(context.get_all(), True, {self.dump_local_context(frame)})"
+            )
+        else:
+            self.write(f"_get_default_module{self.choose_async('_async')}(context)")
+
+    def visit_Import(self, node: nodes.Import, frame: Frame) -> None:
+        """Visit regular imports."""
+        self.writeline(f"{frame.symbols.ref(node.target)} = ", node)
+        if frame.toplevel:
+            self.write(f"context.vars[{node.target!r}] = ")
+
+        self._import_common(node, frame)
+
+        if frame.toplevel and not node.target.startswith("_"):
+            self.writeline(f"context.exported_vars.discard({node.target!r})")
+
+    def visit_FromImport(self, node: nodes.FromImport, frame: Frame) -> None:
+        """Visit named imports."""
+        self.newline(node)
+        self.write("included_template = ")
+        self._import_common(node, frame)
+        var_names = []
+        discarded_names = []
+        for name in node.names:
+            if isinstance(name, tuple):
+                name, alias = name
+            else:
+                alias = name
+            self.writeline(
+                f"{frame.symbols.ref(alias)} ="
+                f" getattr(included_template, {name!r}, missing)"
+            )
+            self.writeline(f"if {frame.symbols.ref(alias)} is missing:")
+            self.indent()
+            message = (
+                "the template {included_template.__name__!r}"
+                f" (imported on {self.position(node)})"
+                f" does not export the requested name {name!r}"
+            )
+            self.writeline(
+                f"{frame.symbols.ref(alias)} = undefined(f{message!r}, name={name!r})"
+            )
+            self.outdent()
+            if frame.toplevel:
+                var_names.append(alias)
+                if not alias.startswith("_"):
+                    discarded_names.append(alias)
+
+        if var_names:
+            if len(var_names) == 1:
+                name = var_names[0]
+                self.writeline(f"context.vars[{name!r}] = {frame.symbols.ref(name)}")
+            else:
+                names_kv = ", ".join(
+                    f"{name!r}: {frame.symbols.ref(name)}" for name in var_names
+                )
+                self.writeline(f"context.vars.update({{{names_kv}}})")
+        if discarded_names:
+            if len(discarded_names) == 1:
+                self.writeline(f"context.exported_vars.discard({discarded_names[0]!r})")
+            else:
+                names_str = ", ".join(map(repr, discarded_names))
+                self.writeline(
+                    f"context.exported_vars.difference_update(({names_str}))"
+                )
+
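+    # Illustrative sketch (identifier names assumed, message abbreviated):
+    # {% from "forms.html" import input as field %} compiles to roughly
+    #
+    #     included_template = environment.get_template("forms.html", name)._get_default_module(context)
+    #     l_0_field = getattr(included_template, 'input', missing)
+    #     if l_0_field is missing:
+    #         l_0_field = undefined("... does not export the requested name 'input'", name='input')
+    #
+    # so a missing export only errors once the imported name is actually used.
+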
+    def visit_For(self, node: nodes.For, frame: Frame) -> None:
+        loop_frame = frame.inner()
+        loop_frame.loop_frame = True
+        test_frame = frame.inner()
+        else_frame = frame.inner()
+
+        # try to figure out if we have an extended loop.  An extended loop
+        # is necessary if the loop is in recursive mode, if the special loop
+        # variable is accessed in the body, or if the body contains a scoped block.
+        extended_loop = (
+            node.recursive
+            or "loop"
+            in find_undeclared(node.iter_child_nodes(only=("body",)), ("loop",))
+            or any(block.scoped for block in node.find_all(nodes.Block))
+        )
+
+        loop_ref = None
+        if extended_loop:
+            loop_ref = loop_frame.symbols.declare_parameter("loop")
+
+        loop_frame.symbols.analyze_node(node, for_branch="body")
+        if node.else_:
+            else_frame.symbols.analyze_node(node, for_branch="else")
+
+        if node.test:
+            loop_filter_func = self.temporary_identifier()
+            test_frame.symbols.analyze_node(node, for_branch="test")
+            self.writeline(f"{self.func(loop_filter_func)}(fiter):", node.test)
+            self.indent()
+            self.enter_frame(test_frame)
+            self.writeline(self.choose_async("async for ", "for "))
+            self.visit(node.target, loop_frame)
+            self.write(" in ")
+            self.write(self.choose_async("auto_aiter(fiter)", "fiter"))
+            self.write(":")
+            self.indent()
+            self.writeline("if ", node.test)
+            self.visit(node.test, test_frame)
+            self.write(":")
+            self.indent()
+            self.writeline("yield ")
+            self.visit(node.target, loop_frame)
+            self.outdent(3)
+            self.leave_frame(test_frame, with_python_scope=True)
+
+        # if we don't have a recursive loop we have to find the shadowed
+        # variables at that point.  Because loops can be nested but the loop
+        # variable is a special one we have to enforce aliasing for it.
+        if node.recursive:
+            self.writeline(
+                f"{self.func('loop')}(reciter, loop_render_func, depth=0):", node
+            )
+            self.indent()
+            self.buffer(loop_frame)
+
+            # Use the same buffer for the else frame
+            else_frame.buffer = loop_frame.buffer
+
+        # make sure the loop variable is a special one and raise a template
+        # assertion error if anything in the loop tries to write to `loop`
+        if extended_loop:
+            self.writeline(f"{loop_ref} = missing")
+
+        for name in node.find_all(nodes.Name):
+            if name.ctx == "store" and name.name == "loop":
+                self.fail(
+                    "Can't assign to special loop variable in for-loop target",
+                    name.lineno,
+                )
+
+        if node.else_:
+            iteration_indicator = self.temporary_identifier()
+            self.writeline(f"{iteration_indicator} = 1")
+
+        self.writeline(self.choose_async("async for ", "for "), node)
+        self.visit(node.target, loop_frame)
+        if extended_loop:
+            self.write(f", {loop_ref} in {self.choose_async('Async')}LoopContext(")
+        else:
+            self.write(" in ")
+
+        if node.test:
+            self.write(f"{loop_filter_func}(")
+        if node.recursive:
+            self.write("reciter")
+        else:
+            if self.environment.is_async and not extended_loop:
+                self.write("auto_aiter(")
+            self.visit(node.iter, frame)
+            if self.environment.is_async and not extended_loop:
+                self.write(")")
+        if node.test:
+            self.write(")")
+
+        if node.recursive:
+            self.write(", undefined, loop_render_func, depth):")
+        else:
+            self.write(", undefined):" if extended_loop else ":")
+
+        self.indent()
+        self.enter_frame(loop_frame)
+
+        self.writeline("_loop_vars = {}")
+        self.blockvisit(node.body, loop_frame)
+        if node.else_:
+            self.writeline(f"{iteration_indicator} = 0")
+        self.outdent()
+        self.leave_frame(
+            loop_frame, with_python_scope=node.recursive and not node.else_
+        )
+
+        if node.else_:
+            self.writeline(f"if {iteration_indicator}:")
+            self.indent()
+            self.enter_frame(else_frame)
+            self.blockvisit(node.else_, else_frame)
+            self.leave_frame(else_frame)
+            self.outdent()
+
+        # if the node was recursive we have to return the buffer contents
+        # and start the iteration code
+        if node.recursive:
+            self.return_buffer_contents(loop_frame)
+            self.outdent()
+            self.start_write(frame, node)
+            self.write(f"{self.choose_async('await ')}loop(")
+            if self.environment.is_async:
+                self.write("auto_aiter(")
+            self.visit(node.iter, frame)
+            if self.environment.is_async:
+                self.write(")")
+            self.write(", loop)")
+            self.end_write(frame)
+
+        # at the end of the iteration, clear any assignments made in the
+        # loop from the top level
+        if self._assign_stack:
+            self._assign_stack[-1].difference_update(loop_frame.symbols.stores)
+
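+    # Illustrative sketch (`t_1` is an assumed temporary name): for
+    # {% for x in items if x > 0 %}, the node.test branch above first emits
+    # a local filter generator
+    #
+    #     def t_1(fiter):
+    #         for l_0_x in fiter:
+    #             if l_0_x > 0:
+    #                 yield l_0_x
+    #
+    # and the main loop then iterates t_1(items).  Because the filter wraps
+    # the iterable before (Async)LoopContext sees it, loop.index only counts
+    # items that passed the test.
+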
+    def visit_If(self, node: nodes.If, frame: Frame) -> None:
+        if_frame = frame.soft()
+        self.writeline("if ", node)
+        self.visit(node.test, if_frame)
+        self.write(":")
+        self.indent()
+        self.blockvisit(node.body, if_frame)
+        self.outdent()
+        for elif_ in node.elif_:
+            self.writeline("elif ", elif_)
+            self.visit(elif_.test, if_frame)
+            self.write(":")
+            self.indent()
+            self.blockvisit(elif_.body, if_frame)
+            self.outdent()
+        if node.else_:
+            self.writeline("else:")
+            self.indent()
+            self.blockvisit(node.else_, if_frame)
+            self.outdent()
+
+    def visit_Macro(self, node: nodes.Macro, frame: Frame) -> None:
+        macro_frame, macro_ref = self.macro_body(node, frame)
+        self.newline()
+        if frame.toplevel:
+            if not node.name.startswith("_"):
+                self.write(f"context.exported_vars.add({node.name!r})")
+            self.writeline(f"context.vars[{node.name!r}] = ")
+        self.write(f"{frame.symbols.ref(node.name)} = ")
+        self.macro_def(macro_ref, macro_frame)
+
+    def visit_CallBlock(self, node: nodes.CallBlock, frame: Frame) -> None:
+        call_frame, macro_ref = self.macro_body(node, frame)
+        self.writeline("caller = ")
+        self.macro_def(macro_ref, call_frame)
+        self.start_write(frame, node)
+        self.visit_Call(node.call, frame, forward_caller=True)
+        self.end_write(frame)
+
+    def visit_FilterBlock(self, node: nodes.FilterBlock, frame: Frame) -> None:
+        filter_frame = frame.inner()
+        filter_frame.symbols.analyze_node(node)
+        self.enter_frame(filter_frame)
+        self.buffer(filter_frame)
+        self.blockvisit(node.body, filter_frame)
+        self.start_write(frame, node)
+        self.visit_Filter(node.filter, filter_frame)
+        self.end_write(frame)
+        self.leave_frame(filter_frame)
+
+    def visit_With(self, node: nodes.With, frame: Frame) -> None:
+        with_frame = frame.inner()
+        with_frame.symbols.analyze_node(node)
+        self.enter_frame(with_frame)
+        for target, expr in zip(node.targets, node.values):
+            self.newline()
+            self.visit(target, with_frame)
+            self.write(" = ")
+            self.visit(expr, frame)
+        self.blockvisit(node.body, with_frame)
+        self.leave_frame(with_frame)
+
+    def visit_ExprStmt(self, node: nodes.ExprStmt, frame: Frame) -> None:
+        self.newline(node)
+        self.visit(node.node, frame)
+
+    class _FinalizeInfo(t.NamedTuple):
+        const: t.Optional[t.Callable[..., str]]
+        src: t.Optional[str]
+
+    @staticmethod
+    def _default_finalize(value: t.Any) -> t.Any:
+        """The default finalize function if the environment isn't
+        configured with one. Or, if the environment has one, this is
+        called on that function's output for constants.
+        """
+        return str(value)
+
+    _finalize: t.Optional[_FinalizeInfo] = None
+
+    def _make_finalize(self) -> _FinalizeInfo:
+        """Build the finalize function to be used on constants and at
+        runtime. Cached so it's only created once for all output nodes.
+
+        Returns a ``namedtuple`` with the following attributes:
+
+        ``const``
+            A function to finalize constant data at compile time.
+
+        ``src``
+            Source code to output around nodes to be evaluated at
+            runtime.
+        """
+        if self._finalize is not None:
+            return self._finalize
+
+        finalize: t.Optional[t.Callable[..., t.Any]]
+        finalize = default = self._default_finalize
+        src = None
+
+        if self.environment.finalize:
+            src = "environment.finalize("
+            env_finalize = self.environment.finalize
+            pass_arg = {
+                _PassArg.context: "context",
+                _PassArg.eval_context: "context.eval_ctx",
+                _PassArg.environment: "environment",
+            }.get(
+                _PassArg.from_obj(env_finalize)  # type: ignore
+            )
+            finalize = None
+
+            if pass_arg is None:
+
+                def finalize(value: t.Any) -> t.Any:
+                    return default(env_finalize(value))
+
+            else:
+                src = f"{src}{pass_arg}, "
+
+                if pass_arg == "environment":
+
+                    def finalize(value: t.Any) -> t.Any:
+                        return default(env_finalize(self.environment, value))
+
+        self._finalize = self._FinalizeInfo(finalize, src)
+        return self._finalize
+
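+    # Summary of the cases above: with no environment finalize, constants
+    # fold through str() at compile time and `src` stays None.  A plain
+    # finalize folds constants through default(env_finalize(v)) and wraps
+    # runtime output in "environment.finalize(".  A @pass_context or
+    # @pass_eval_context finalize disables compile-time folding (`const` is
+    # None), while @pass_environment keeps folding by binding
+    # self.environment into the closure.
+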
+    def _output_const_repr(self, group: t.Iterable[t.Any]) -> str:
+        """Given a group of constant values converted from ``Output``
+        child nodes, produce a string to write to the template module
+        source.
+        """
+        return repr(concat(group))
+
+    def _output_child_to_const(
+        self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo
+    ) -> str:
+        """Try to optimize a child of an ``Output`` node by trying to
+        convert it to constant, finalized data at compile time.
+
+        If :exc:`Impossible` is raised, the node is not constant and
+        will be evaluated at runtime. Any other exception will also be
+        evaluated at runtime for easier debugging.
+        """
+        const = node.as_const(frame.eval_ctx)
+
+        if frame.eval_ctx.autoescape:
+            const = escape(const)
+
+        # Template data doesn't go through finalize.
+        if isinstance(node, nodes.TemplateData):
+            return str(const)
+
+        return finalize.const(const)  # type: ignore
+
+    def _output_child_pre(
+        self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo
+    ) -> None:
+        """Output extra source code before visiting a child of an
+        ``Output`` node.
+        """
+        if frame.eval_ctx.volatile:
+            self.write("(escape if context.eval_ctx.autoescape else str)(")
+        elif frame.eval_ctx.autoescape:
+            self.write("escape(")
+        else:
+            self.write("str(")
+
+        if finalize.src is not None:
+            self.write(finalize.src)
+
+    def _output_child_post(
+        self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo
+    ) -> None:
+        """Output extra source code after visiting a child of an
+        ``Output`` node.
+        """
+        self.write(")")
+
+        if finalize.src is not None:
+            self.write(")")
+
+    def visit_Output(self, node: nodes.Output, frame: Frame) -> None:
+        # If an extends is active, don't render outside a block.
+        if frame.require_output_check:
+            # A top-level extends is known to exist at compile time.
+            if self.has_known_extends:
+                return
+
+            self.writeline("if parent_template is None:")
+            self.indent()
+
+        finalize = self._make_finalize()
+        body: t.List[t.Union[t.List[t.Any], nodes.Expr]] = []
+
+        # Evaluate constants at compile time if possible. Each item in
+        # body will be either a list of static data or a node to be
+        # evaluated at runtime.
+        for child in node.nodes:
+            try:
+                if not (
+                    # If the finalize function requires runtime context,
+                    # constants can't be evaluated at compile time.
+                    finalize.const
+                    # Unless it's basic template data that won't be
+                    # finalized anyway.
+                    or isinstance(child, nodes.TemplateData)
+                ):
+                    raise nodes.Impossible()
+
+                const = self._output_child_to_const(child, frame, finalize)
+            except (nodes.Impossible, Exception):
+                # The node was not constant and needs to be evaluated at
+                # runtime. Or another error was raised, which is easier
+                # to debug at runtime.
+                body.append(child)
+                continue
+
+            if body and isinstance(body[-1], list):
+                body[-1].append(const)
+            else:
+                body.append([const])
+
+        if frame.buffer is not None:
+            if len(body) == 1:
+                self.writeline(f"{frame.buffer}.append(")
+            else:
+                self.writeline(f"{frame.buffer}.extend((")
+
+            self.indent()
+
+        for item in body:
+            if isinstance(item, list):
+                # A group of constant data to join and output.
+                val = self._output_const_repr(item)
+
+                if frame.buffer is None:
+                    self.writeline("yield " + val)
+                else:
+                    self.writeline(val + ",")
+            else:
+                if frame.buffer is None:
+                    self.writeline("yield ", item)
+                else:
+                    self.newline(item)
+
+                # A node to be evaluated at runtime.
+                self._output_child_pre(item, frame, finalize)
+                self.visit(item, frame)
+                self._output_child_post(item, frame, finalize)
+
+                if frame.buffer is not None:
+                    self.write(",")
+
+        if frame.buffer is not None:
+            self.outdent()
+            self.writeline(")" if len(body) == 1 else "))")
+
+        if frame.require_output_check:
+            self.outdent()
+
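+    # Illustrative example (assuming the default finalize): a template such
+    # as `Hello {{ 1 + 1 }}!` folds into one constant group, so the
+    # unbuffered output of visit_Output is the single statement
+    #
+    #     yield 'Hello 2!'
+    #
+    # while any child that raises Impossible stays a runtime `yield str(...)`.
+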
+    def visit_Assign(self, node: nodes.Assign, frame: Frame) -> None:
+        self.push_assign_tracking()
+        self.newline(node)
+        self.visit(node.target, frame)
+        self.write(" = ")
+        self.visit(node.node, frame)
+        self.pop_assign_tracking(frame)
+
+    def visit_AssignBlock(self, node: nodes.AssignBlock, frame: Frame) -> None:
+        self.push_assign_tracking()
+        block_frame = frame.inner()
+        # This is a special case.  Since a set block always captures we
+        # will disable output checks.  This way one can use set blocks
+        # at the top level even in extended templates.
+        block_frame.require_output_check = False
+        block_frame.symbols.analyze_node(node)
+        self.enter_frame(block_frame)
+        self.buffer(block_frame)
+        self.blockvisit(node.body, block_frame)
+        self.newline(node)
+        self.visit(node.target, frame)
+        self.write(" = (Markup if context.eval_ctx.autoescape else identity)(")
+        if node.filter is not None:
+            self.visit_Filter(node.filter, block_frame)
+        else:
+            self.write(f"concat({block_frame.buffer})")
+        self.write(")")
+        self.pop_assign_tracking(frame)
+        self.leave_frame(block_frame)
+
+    # -- Expression Visitors
+
+    def visit_Name(self, node: nodes.Name, frame: Frame) -> None:
+        if node.ctx == "store" and (
+            frame.toplevel or frame.loop_frame or frame.block_frame
+        ):
+            if self._assign_stack:
+                self._assign_stack[-1].add(node.name)
+        ref = frame.symbols.ref(node.name)
+
+        # If we are looking up a variable we might have to deal with the
+        # case where it's undefined.  We can skip that case if the load
+        # instruction indicates a parameter which are always defined.
+        if node.ctx == "load":
+            load = frame.symbols.find_load(ref)
+            if not (
+                load is not None
+                and load[0] == VAR_LOAD_PARAMETER
+                and not self.parameter_is_undeclared(ref)
+            ):
+                self.write(
+                    f"(undefined(name={node.name!r}) if {ref} is missing else {ref})"
+                )
+                return
+
+        self.write(ref)
+
+    def visit_NSRef(self, node: nodes.NSRef, frame: Frame) -> None:
+        # NSRefs can only be used to store values; since they use the normal
+        # `foo.bar` notation they will be parsed as a normal attribute access
+        # when used anywhere but in a `set` context.
+        ref = frame.symbols.ref(node.name)
+        self.writeline(f"if not isinstance({ref}, Namespace):")
+        self.indent()
+        self.writeline(
+            "raise TemplateRuntimeError"
+            '("cannot assign attribute on non-namespace object")'
+        )
+        self.outdent()
+        self.writeline(f"{ref}[{node.attr!r}]")
+
+    def visit_Const(self, node: nodes.Const, frame: Frame) -> None:
+        val = node.as_const(frame.eval_ctx)
+        if isinstance(val, float):
+            self.write(str(val))
+        else:
+            self.write(repr(val))
+
+    def visit_TemplateData(self, node: nodes.TemplateData, frame: Frame) -> None:
+        try:
+            self.write(repr(node.as_const(frame.eval_ctx)))
+        except nodes.Impossible:
+            self.write(
+                f"(Markup if context.eval_ctx.autoescape else identity)({node.data!r})"
+            )
+
+    def visit_Tuple(self, node: nodes.Tuple, frame: Frame) -> None:
+        self.write("(")
+        idx = -1
+        for idx, item in enumerate(node.items):
+            if idx:
+                self.write(", ")
+            self.visit(item, frame)
+        self.write(",)" if idx == 0 else ")")
+
+    def visit_List(self, node: nodes.List, frame: Frame) -> None:
+        self.write("[")
+        for idx, item in enumerate(node.items):
+            if idx:
+                self.write(", ")
+            self.visit(item, frame)
+        self.write("]")
+
+    def visit_Dict(self, node: nodes.Dict, frame: Frame) -> None:
+        self.write("{")
+        for idx, item in enumerate(node.items):
+            if idx:
+                self.write(", ")
+            self.visit(item.key, frame)
+            self.write(": ")
+            self.visit(item.value, frame)
+        self.write("}")
+
+    visit_Add = _make_binop("+")
+    visit_Sub = _make_binop("-")
+    visit_Mul = _make_binop("*")
+    visit_Div = _make_binop("/")
+    visit_FloorDiv = _make_binop("//")
+    visit_Pow = _make_binop("**")
+    visit_Mod = _make_binop("%")
+    visit_And = _make_binop("and")
+    visit_Or = _make_binop("or")
+    visit_Pos = _make_unop("+")
+    visit_Neg = _make_unop("-")
+    visit_Not = _make_unop("not ")
+
+    @optimizeconst
+    def visit_Concat(self, node: nodes.Concat, frame: Frame) -> None:
+        if frame.eval_ctx.volatile:
+            func_name = "(markup_join if context.eval_ctx.volatile else str_join)"
+        elif frame.eval_ctx.autoescape:
+            func_name = "markup_join"
+        else:
+            func_name = "str_join"
+        self.write(f"{func_name}((")
+        for arg in node.nodes:
+            self.visit(arg, frame)
+            self.write(", ")
+        self.write("))")
+
+    @optimizeconst
+    def visit_Compare(self, node: nodes.Compare, frame: Frame) -> None:
+        self.write("(")
+        self.visit(node.expr, frame)
+        for op in node.ops:
+            self.visit(op, frame)
+        self.write(")")
+
+    def visit_Operand(self, node: nodes.Operand, frame: Frame) -> None:
+        self.write(f" {operators[node.op]} ")
+        self.visit(node.expr, frame)
+
+    @optimizeconst
+    def visit_Getattr(self, node: nodes.Getattr, frame: Frame) -> None:
+        if self.environment.is_async:
+            self.write("(await auto_await(")
+
+        self.write("environment.getattr(")
+        self.visit(node.node, frame)
+        self.write(f", {node.attr!r})")
+
+        if self.environment.is_async:
+            self.write("))")
+
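+    # Illustrative note: `{{ user.name }}` thus compiles to
+    # environment.getattr(<user ref>, 'name'); in async mode the call is
+    # additionally wrapped as (await auto_await(...)) so an attribute that
+    # resolves to an awaitable is awaited transparently.
+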
+    @optimizeconst
+    def visit_Getitem(self, node: nodes.Getitem, frame: Frame) -> None:
+        # slices bypass the environment getitem method.
+        if isinstance(node.arg, nodes.Slice):
+            self.visit(node.node, frame)
+            self.write("[")
+            self.visit(node.arg, frame)
+            self.write("]")
+        else:
+            if self.environment.is_async:
+                self.write("(await auto_await(")
+
+            self.write("environment.getitem(")
+            self.visit(node.node, frame)
+            self.write(", ")
+            self.visit(node.arg, frame)
+            self.write(")")
+
+            if self.environment.is_async:
+                self.write("))")
+
+    def visit_Slice(self, node: nodes.Slice, frame: Frame) -> None:
+        if node.start is not None:
+            self.visit(node.start, frame)
+        self.write(":")
+        if node.stop is not None:
+            self.visit(node.stop, frame)
+        if node.step is not None:
+            self.write(":")
+            self.visit(node.step, frame)
+
+    @contextmanager
+    def _filter_test_common(
+        self, node: t.Union[nodes.Filter, nodes.Test], frame: Frame, is_filter: bool
+    ) -> t.Iterator[None]:
+        if self.environment.is_async:
+            self.write("(await auto_await(")
+
+        if is_filter:
+            self.write(f"{self.filters[node.name]}(")
+            func = self.environment.filters.get(node.name)
+        else:
+            self.write(f"{self.tests[node.name]}(")
+            func = self.environment.tests.get(node.name)
+
+        # When inside an If or CondExpr frame, allow the filter to be
+        # undefined at compile time and only raise an error if it's
+        # actually called at runtime. See pull_dependencies.
+        if func is None and not frame.soft_frame:
+            type_name = "filter" if is_filter else "test"
+            self.fail(f"No {type_name} named {node.name!r}.", node.lineno)
+
+        pass_arg = {
+            _PassArg.context: "context",
+            _PassArg.eval_context: "context.eval_ctx",
+            _PassArg.environment: "environment",
+        }.get(
+            _PassArg.from_obj(func)  # type: ignore
+        )
+
+        if pass_arg is not None:
+            self.write(f"{pass_arg}, ")
+
+        # Back to the visitor function to handle visiting the target of
+        # the filter or test.
+        yield
+
+        self.signature(node, frame)
+        self.write(")")
+
+        if self.environment.is_async:
+            self.write("))")
+
+    @optimizeconst
+    def visit_Filter(self, node: nodes.Filter, frame: Frame) -> None:
+        with self._filter_test_common(node, frame, True):
+            # if the filter node is None we are inside a filter block
+            # and want to write to the current buffer
+            if node.node is not None:
+                self.visit(node.node, frame)
+            elif frame.eval_ctx.volatile:
+                self.write(
+                    f"(Markup(concat({frame.buffer}))"
+                    f" if context.eval_ctx.autoescape else concat({frame.buffer}))"
+                )
+            elif frame.eval_ctx.autoescape:
+                self.write(f"Markup(concat({frame.buffer}))")
+            else:
+                self.write(f"concat({frame.buffer})")
+
+    @optimizeconst
+    def visit_Test(self, node: nodes.Test, frame: Frame) -> None:
+        with self._filter_test_common(node, frame, False):
+            self.visit(node.node, frame)
+
+    @optimizeconst
+    def visit_CondExpr(self, node: nodes.CondExpr, frame: Frame) -> None:
+        frame = frame.soft()
+
+        def write_expr2() -> None:
+            if node.expr2 is not None:
+                self.visit(node.expr2, frame)
+                return
+
+            self.write(
+                f'cond_expr_undefined("the inline if-expression on'
+                f" {self.position(node)} evaluated to false and no else"
+                f' section was defined.")'
+            )
+
+        self.write("(")
+        self.visit(node.expr1, frame)
+        self.write(" if ")
+        self.visit(node.test, frame)
+        self.write(" else ")
+        write_expr2()
+        self.write(")")
+
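+    # Illustrative note: `{{ a if b }}` has no else part, so expr2 is None
+    # and write_expr2 falls back to cond_expr_undefined(...).  That returns
+    # an Undefined rather than raising, so plain output renders as an empty
+    # string and the "no else section was defined" error only surfaces if
+    # the value is used in a way Undefined disallows.
+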
+    @optimizeconst
+    def visit_Call(
+        self, node: nodes.Call, frame: Frame, forward_caller: bool = False
+    ) -> None:
+        if self.environment.is_async:
+            self.write("(await auto_await(")
+        if self.environment.sandboxed:
+            self.write("environment.call(context, ")
+        else:
+            self.write("context.call(")
+        self.visit(node.node, frame)
+        extra_kwargs = {"caller": "caller"} if forward_caller else None
+        loop_kwargs = {"_loop_vars": "_loop_vars"} if frame.loop_frame else {}
+        block_kwargs = {"_block_vars": "_block_vars"} if frame.block_frame else {}
+        if extra_kwargs:
+            extra_kwargs.update(loop_kwargs, **block_kwargs)
+        elif loop_kwargs or block_kwargs:
+            extra_kwargs = dict(loop_kwargs, **block_kwargs)
+        self.signature(node, frame, extra_kwargs)
+        self.write(")")
+        if self.environment.is_async:
+            self.write("))")
+
+    def visit_Keyword(self, node: nodes.Keyword, frame: Frame) -> None:
+        self.write(node.key + "=")
+        self.visit(node.value, frame)
+
+    # -- Unused nodes for extensions
+
+    def visit_MarkSafe(self, node: nodes.MarkSafe, frame: Frame) -> None:
+        self.write("Markup(")
+        self.visit(node.expr, frame)
+        self.write(")")
+
+    def visit_MarkSafeIfAutoescape(
+        self, node: nodes.MarkSafeIfAutoescape, frame: Frame
+    ) -> None:
+        self.write("(Markup if context.eval_ctx.autoescape else identity)(")
+        self.visit(node.expr, frame)
+        self.write(")")
+
+    def visit_EnvironmentAttribute(
+        self, node: nodes.EnvironmentAttribute, frame: Frame
+    ) -> None:
+        self.write("environment." + node.name)
+
+    def visit_ExtensionAttribute(
+        self, node: nodes.ExtensionAttribute, frame: Frame
+    ) -> None:
+        self.write(f"environment.extensions[{node.identifier!r}].{node.name}")
+
+    def visit_ImportedName(self, node: nodes.ImportedName, frame: Frame) -> None:
+        self.write(self.import_aliases[node.importname])
+
+    def visit_InternalName(self, node: nodes.InternalName, frame: Frame) -> None:
+        self.write(node.name)
+
+    def visit_ContextReference(
+        self, node: nodes.ContextReference, frame: Frame
+    ) -> None:
+        self.write("context")
+
+    def visit_DerivedContextReference(
+        self, node: nodes.DerivedContextReference, frame: Frame
+    ) -> None:
+        self.write(self.derive_context(frame))
+
+    def visit_Continue(self, node: nodes.Continue, frame: Frame) -> None:
+        self.writeline("continue", node)
+
+    def visit_Break(self, node: nodes.Break, frame: Frame) -> None:
+        self.writeline("break", node)
+
+    def visit_Scope(self, node: nodes.Scope, frame: Frame) -> None:
+        scope_frame = frame.inner()
+        scope_frame.symbols.analyze_node(node)
+        self.enter_frame(scope_frame)
+        self.blockvisit(node.body, scope_frame)
+        self.leave_frame(scope_frame)
+
+    def visit_OverlayScope(self, node: nodes.OverlayScope, frame: Frame) -> None:
+        ctx = self.temporary_identifier()
+        self.writeline(f"{ctx} = {self.derive_context(frame)}")
+        self.writeline(f"{ctx}.vars = ")
+        self.visit(node.context, frame)
+        self.push_context_reference(ctx)
+
+        scope_frame = frame.inner(isolated=True)
+        scope_frame.symbols.analyze_node(node)
+        self.enter_frame(scope_frame)
+        self.blockvisit(node.body, scope_frame)
+        self.leave_frame(scope_frame)
+        self.pop_context_reference()
+
+    def visit_EvalContextModifier(
+        self, node: nodes.EvalContextModifier, frame: Frame
+    ) -> None:
+        for keyword in node.options:
+            self.writeline(f"context.eval_ctx.{keyword.key} = ")
+            self.visit(keyword.value, frame)
+            try:
+                val = keyword.value.as_const(frame.eval_ctx)
+            except nodes.Impossible:
+                frame.eval_ctx.volatile = True
+            else:
+                setattr(frame.eval_ctx, keyword.key, val)
+
+    def visit_ScopedEvalContextModifier(
+        self, node: nodes.ScopedEvalContextModifier, frame: Frame
+    ) -> None:
+        old_ctx_name = self.temporary_identifier()
+        saved_ctx = frame.eval_ctx.save()
+        self.writeline(f"{old_ctx_name} = context.eval_ctx.save()")
+        self.visit_EvalContextModifier(node, frame)
+        for child in node.body:
+            self.visit(child, frame)
+        frame.eval_ctx.revert(saved_ctx)
+        self.writeline(f"context.eval_ctx.revert({old_ctx_name})")
diff --git a/venv/lib/python3.9/site-packages/jinja2/constants.py b/venv/lib/python3.9/site-packages/jinja2/constants.py
new file mode 100644
index 0000000..41a1c23
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/constants.py
@@ -0,0 +1,20 @@
+#: list of lorem ipsum words used by the lipsum() helper function
+LOREM_IPSUM_WORDS = """\
+a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at
+auctor augue bibendum blandit class commodo condimentum congue consectetuer
+consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus
+diam dictum dictumst dignissim dis dolor donec dui duis egestas eget eleifend
+elementum elit enim erat eros est et etiam eu euismod facilisi facilisis fames
+faucibus felis fermentum feugiat fringilla fusce gravida habitant habitasse hac
+hendrerit hymenaeos iaculis id imperdiet in inceptos integer interdum ipsum
+justo lacinia lacus laoreet lectus leo libero ligula litora lobortis lorem
+luctus maecenas magna magnis malesuada massa mattis mauris metus mi molestie
+mollis montes morbi mus nam nascetur natoque nec neque netus nibh nisi nisl non
+nonummy nostra nulla nullam nunc odio orci ornare parturient pede pellentesque
+penatibus per pharetra phasellus placerat platea porta porttitor posuere
+potenti praesent pretium primis proin pulvinar purus quam quis quisque rhoncus
+ridiculus risus rutrum sagittis sapien scelerisque sed sem semper senectus sit
+sociis sociosqu sodales sollicitudin suscipit suspendisse taciti tellus tempor
+tempus tincidunt torquent tortor tristique turpis ullamcorper ultrices
+ultricies urna ut varius vehicula vel velit venenatis vestibulum vitae vivamus
+viverra volutpat vulputate"""
diff --git a/venv/lib/python3.9/site-packages/jinja2/debug.py b/venv/lib/python3.9/site-packages/jinja2/debug.py
new file mode 100644
index 0000000..7ed7e92
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/debug.py
@@ -0,0 +1,191 @@
+import sys
+import typing as t
+from types import CodeType
+from types import TracebackType
+
+from .exceptions import TemplateSyntaxError
+from .utils import internal_code
+from .utils import missing
+
+if t.TYPE_CHECKING:
+    from .runtime import Context
+
+
+def rewrite_traceback_stack(source: t.Optional[str] = None) -> BaseException:
+    """Rewrite the current exception to replace any tracebacks from
+    within compiled template code with tracebacks that look like they
+    came from the template source.
+
+    This must be called within an ``except`` block.
+
+    :param source: For ``TemplateSyntaxError``, the original source if
+        known.
+    :return: The original exception with the rewritten traceback.
+    """
+    _, exc_value, tb = sys.exc_info()
+    exc_value = t.cast(BaseException, exc_value)
+    tb = t.cast(TracebackType, tb)
+
+    if isinstance(exc_value, TemplateSyntaxError) and not exc_value.translated:
+        exc_value.translated = True
+        exc_value.source = source
+        # Remove the old traceback, otherwise the frames from the
+        # compiler still show up.
+        exc_value.with_traceback(None)
+        # Outside of runtime, so the frame isn't executing template
+        # code, but it still needs to point at the template.
+        tb = fake_traceback(
+            exc_value, None, exc_value.filename or "<unknown>", exc_value.lineno
+        )
+    else:
+        # Skip the frame for the render function.
+        tb = tb.tb_next
+
+    stack = []
+
+    # Build the stack of traceback objects, replacing any in template
+    # code with the source file and line information.
+    while tb is not None:
+        # Skip frames decorated with @internalcode. These are internal
+        # calls that aren't useful in template debugging output.
+        if tb.tb_frame.f_code in internal_code:
+            tb = tb.tb_next
+            continue
+
+        template = tb.tb_frame.f_globals.get("__jinja_template__")
+
+        if template is not None:
+            lineno = template.get_corresponding_lineno(tb.tb_lineno)
+            fake_tb = fake_traceback(exc_value, tb, template.filename, lineno)
+            stack.append(fake_tb)
+        else:
+            stack.append(tb)
+
+        tb = tb.tb_next
+
+    tb_next = None
+
+    # Assign tb_next in reverse to avoid circular references.
+    for tb in reversed(stack):
+        tb.tb_next = tb_next
+        tb_next = tb
+
+    return exc_value.with_traceback(tb_next)
+
+
+def fake_traceback(  # type: ignore
+    exc_value: BaseException, tb: t.Optional[TracebackType], filename: str, lineno: int
+) -> TracebackType:
+    """Produce a new traceback object that looks like it came from the
+    template source instead of the compiled code. The filename, line
+    number, and location name will point to the template, and the local
+    variables will be the current template context.
+
+    :param exc_value: The original exception to be re-raised to create
+        the new traceback.
+    :param tb: The original traceback to get the local variables and
+        code info from.
+    :param filename: The template filename.
+    :param lineno: The line number in the template source.
+    """
+    if tb is not None:
+        # Replace the real locals with the context that would be
+        # available at that point in the template.
+        locals = get_template_locals(tb.tb_frame.f_locals)
+        locals.pop("__jinja_exception__", None)
+    else:
+        locals = {}
+
+    globals = {
+        "__name__": filename,
+        "__file__": filename,
+        "__jinja_exception__": exc_value,
+    }
+    # Raise an exception at the correct line number.
+    code: CodeType = compile(
+        "\n" * (lineno - 1) + "raise __jinja_exception__", filename, "exec"
+    )
+
+    # Build a new code object that points to the template file and
+    # replaces the location with a block name.
+    location = "template"
+
+    if tb is not None:
+        function = tb.tb_frame.f_code.co_name
+
+        if function == "root":
+            location = "top-level template code"
+        elif function.startswith("block_"):
+            location = f"block {function[6:]!r}"
+
+    if sys.version_info >= (3, 8):
+        code = code.replace(co_name=location)
+    else:
+        code = CodeType(
+            code.co_argcount,
+            code.co_kwonlyargcount,
+            code.co_nlocals,
+            code.co_stacksize,
+            code.co_flags,
+            code.co_code,
+            code.co_consts,
+            code.co_names,
+            code.co_varnames,
+            code.co_filename,
+            location,
+            code.co_firstlineno,
+            code.co_lnotab,
+            code.co_freevars,
+            code.co_cellvars,
+        )
+
+    # Execute the new code, which is guaranteed to raise, and return
+    # the new traceback without this frame.
+    try:
+        exec(code, globals, locals)
+    except BaseException:
+        return sys.exc_info()[2].tb_next  # type: ignore
+
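+# Note on the mechanism above (informal): compiling (lineno - 1) newlines
+# followed by a bare `raise __jinja_exception__` is what makes the fake
+# frame report the template's line number; the exec is guaranteed to raise,
+# and taking tb_next strips the exec frame itself, leaving a traceback
+# entry whose filename, lineno, and co_name all point into the template.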
+
+def get_template_locals(real_locals: t.Mapping[str, t.Any]) -> t.Dict[str, t.Any]:
+    """Based on the runtime locals, get the context that would be
+    available at that point in the template.
+    """
+    # Start with the current template context.
+    ctx: "t.Optional[Context]" = real_locals.get("context")
+
+    if ctx is not None:
+        data: t.Dict[str, t.Any] = ctx.get_all().copy()
+    else:
+        data = {}
+
+    # Might be in a derived context that only sets local variables
+    # rather than pushing a context. Local variables follow the scheme
+    # l_depth_name. Find the highest-depth local that has a value for
+    # each name.
+    local_overrides: t.Dict[str, t.Tuple[int, t.Any]] = {}
+
+    for name, value in real_locals.items():
+        if not name.startswith("l_") or value is missing:
+            # Not a template variable, or no longer relevant.
+            continue
+
+        try:
+            _, depth_str, name = name.split("_", 2)
+            depth = int(depth_str)
+        except ValueError:
+            continue
+
+        cur_depth = local_overrides.get(name, (-1,))[0]
+
+        if cur_depth < depth:
+            local_overrides[name] = (depth, value)
+
+    # Modify the context with any derived context.
+    for name, (_, value) in local_overrides.items():
+        if value is missing:
+            data.pop(name, None)
+        else:
+            data[name] = value
+
+    return data
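+
+# Illustrative example (values assumed): given real_locals such as
+# {"context": ctx, "l_0_item": 1, "l_1_item": 2}, get_template_locals
+# returns ctx.get_all() overlaid with {"item": 2}; for each name, the
+# binding with the highest depth wins.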
diff --git a/venv/lib/python3.9/site-packages/jinja2/defaults.py b/venv/lib/python3.9/site-packages/jinja2/defaults.py
new file mode 100644
index 0000000..638cad3
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/defaults.py
@@ -0,0 +1,48 @@
+import typing as t
+
+from .filters import FILTERS as DEFAULT_FILTERS  # noqa: F401
+from .tests import TESTS as DEFAULT_TESTS  # noqa: F401
+from .utils import Cycler
+from .utils import generate_lorem_ipsum
+from .utils import Joiner
+from .utils import Namespace
+
+if t.TYPE_CHECKING:
+    import typing_extensions as te
+
+# defaults for the parser / lexer
+BLOCK_START_STRING = "{%"
+BLOCK_END_STRING = "%}"
+VARIABLE_START_STRING = "{{"
+VARIABLE_END_STRING = "}}"
+COMMENT_START_STRING = "{#"
+COMMENT_END_STRING = "#}"
+LINE_STATEMENT_PREFIX: t.Optional[str] = None
+LINE_COMMENT_PREFIX: t.Optional[str] = None
+TRIM_BLOCKS = False
+LSTRIP_BLOCKS = False
+NEWLINE_SEQUENCE: "te.Literal['\\n', '\\r\\n', '\\r']" = "\n"
+KEEP_TRAILING_NEWLINE = False
+
+# default filters, tests and namespace
+
+DEFAULT_NAMESPACE = {
+    "range": range,
+    "dict": dict,
+    "lipsum": generate_lorem_ipsum,
+    "cycler": Cycler,
+    "joiner": Joiner,
+    "namespace": Namespace,
+}
+
+# default policies
+DEFAULT_POLICIES: t.Dict[str, t.Any] = {
+    "compiler.ascii_str": True,
+    "urlize.rel": "noopener",
+    "urlize.target": None,
+    "urlize.extra_schemes": None,
+    "truncate.leeway": 5,
+    "json.dumps_function": None,
+    "json.dumps_kwargs": {"sort_keys": True},
+    "ext.i18n.trimmed": False,
+}
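+
+# Note (informal): an Environment copies this mapping into its `policies`
+# attribute, so entries can be overridden per environment, e.g.
+#
+#     env.policies["json.dumps_kwargs"] = {"sort_keys": True, "indent": 2}
+#     env.policies["urlize.target"] = "_blank"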
diff --git a/venv/lib/python3.9/site-packages/jinja2/environment.py b/venv/lib/python3.9/site-packages/jinja2/environment.py
new file mode 100644
index 0000000..ea04e8b
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/environment.py
@@ -0,0 +1,1667 @@
+"""Classes for managing templates and their runtime and compile time
+options.
+"""
+import os
+import typing
+import typing as t
+import weakref
+from collections import ChainMap
+from functools import lru_cache
+from functools import partial
+from functools import reduce
+from types import CodeType
+
+from markupsafe import Markup
+
+from . import nodes
+from .compiler import CodeGenerator
+from .compiler import generate
+from .defaults import BLOCK_END_STRING
+from .defaults import BLOCK_START_STRING
+from .defaults import COMMENT_END_STRING
+from .defaults import COMMENT_START_STRING
+from .defaults import DEFAULT_FILTERS
+from .defaults import DEFAULT_NAMESPACE
+from .defaults import DEFAULT_POLICIES
+from .defaults import DEFAULT_TESTS
+from .defaults import KEEP_TRAILING_NEWLINE
+from .defaults import LINE_COMMENT_PREFIX
+from .defaults import LINE_STATEMENT_PREFIX
+from .defaults import LSTRIP_BLOCKS
+from .defaults import NEWLINE_SEQUENCE
+from .defaults import TRIM_BLOCKS
+from .defaults import VARIABLE_END_STRING
+from .defaults import VARIABLE_START_STRING
+from .exceptions import TemplateNotFound
+from .exceptions import TemplateRuntimeError
+from .exceptions import TemplatesNotFound
+from .exceptions import TemplateSyntaxError
+from .exceptions import UndefinedError
+from .lexer import get_lexer
+from .lexer import Lexer
+from .lexer import TokenStream
+from .nodes import EvalContext
+from .parser import Parser
+from .runtime import Context
+from .runtime import new_context
+from .runtime import Undefined
+from .utils import _PassArg
+from .utils import concat
+from .utils import consume
+from .utils import import_string
+from .utils import internalcode
+from .utils import LRUCache
+from .utils import missing
+
+if t.TYPE_CHECKING:
+    import typing_extensions as te
+    from .bccache import BytecodeCache
+    from .ext import Extension
+    from .loaders import BaseLoader
+
+_env_bound = t.TypeVar("_env_bound", bound="Environment")
+
+
+# for direct template usage we have up to ten living environments
+@lru_cache(maxsize=10)
+def get_spontaneous_environment(cls: t.Type[_env_bound], *args: t.Any) -> _env_bound:
+    """Return a new spontaneous environment. A spontaneous environment
+    is used for templates created directly rather than through an
+    existing environment.
+
+    :param cls: Environment class to create.
+    :param args: Positional arguments passed to environment.
+    """
+    env = cls(*args)
+    env.shared = True
+    return env
+
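+# Note (informal): this is the path taken when constructing Template objects
+# directly; thanks to the lru_cache, templates created with identical
+# settings share one spontaneous Environment rather than building a new one
+# per template.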
+
+def create_cache(
+    size: int,
+) -> t.Optional[t.MutableMapping[t.Tuple[weakref.ref, str], "Template"]]:
+    """Return the cache class for the given size."""
+    if size == 0:
+        return None
+
+    if size < 0:
+        return {}
+
+    return LRUCache(size)  # type: ignore
+
+
+def copy_cache(
+    cache: t.Optional[t.MutableMapping],
+) -> t.Optional[t.MutableMapping[t.Tuple[weakref.ref, str], "Template"]]:
+    """Create an empty copy of the given cache."""
+    if cache is None:
+        return None
+
+    if type(cache) is dict:
+        return {}
+
+    return LRUCache(cache.capacity)  # type: ignore
+
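+# Summary of the cache_size contract implemented above (informal):
+#
+#     create_cache(0)   -> None           (no caching; recompile every time)
+#     create_cache(-1)  -> {}             (unbounded dict; never evicts)
+#     create_cache(400) -> LRUCache(400)  (bounded; evicts least recently used)
+#
+# copy_cache mirrors the original's type but starts empty, so overlays don't
+# share compiled templates with their parent environment.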
+
+def load_extensions(
+    environment: "Environment",
+    extensions: t.Sequence[t.Union[str, t.Type["Extension"]]],
+) -> t.Dict[str, "Extension"]:
+    """Load the extensions from the list and bind it to the environment.
+    Returns a dict of instantiated extensions.
+    """
+    result = {}
+
+    for extension in extensions:
+        if isinstance(extension, str):
+            extension = t.cast(t.Type["Extension"], import_string(extension))
+
+        result[extension.identifier] = extension(environment)
+
+    return result
+
+
+def _environment_config_check(environment: "Environment") -> "Environment":
+    """Perform a sanity check on the environment."""
+    assert issubclass(
+        environment.undefined, Undefined
+    ), "'undefined' must be a subclass of 'jinja2.Undefined'."
+    assert (
+        environment.block_start_string
+        != environment.variable_start_string
+        != environment.comment_start_string
+    ), "block, variable and comment start strings must be different."
+    assert environment.newline_sequence in {
+        "\r",
+        "\r\n",
+        "\n",
+    }, "'newline_sequence' must be one of '\\n', '\\r\\n', or '\\r'."
+    return environment
+
+
+class Environment:
+    r"""The core component of Jinja is the `Environment`.  It contains
+    important shared variables like configuration, filters, tests,
+    globals and others.  Instances of this class may be modified if
+    they are not shared and if no template was loaded so far.
+    Modifications on environments after the first template was loaded
+    will lead to surprising effects and undefined behavior.
+
+    Here are the possible initialization parameters:
+
+        `block_start_string`
+            The string marking the beginning of a block.  Defaults to ``'{%'``.
+
+        `block_end_string`
+            The string marking the end of a block.  Defaults to ``'%}'``.
+
+        `variable_start_string`
+            The string marking the beginning of a print statement.
+            Defaults to ``'{{'``.
+
+        `variable_end_string`
+            The string marking the end of a print statement.  Defaults to
+            ``'}}'``.
+
+        `comment_start_string`
+            The string marking the beginning of a comment.  Defaults to ``'{#'``.
+
+        `comment_end_string`
+            The string marking the end of a comment.  Defaults to ``'#}'``.
+
+        `line_statement_prefix`
+            If given and a string, this will be used as prefix for line based
+            statements.  See also :ref:`line-statements`.
+
+        `line_comment_prefix`
+            If given and a string, this will be used as prefix for line based
+            comments.  See also :ref:`line-statements`.
+
+            .. versionadded:: 2.2
+
+        `trim_blocks`
+            If this is set to ``True`` the first newline after a block is
+            removed (block, not variable tag!).  Defaults to `False`.
+
+        `lstrip_blocks`
+            If this is set to ``True`` leading spaces and tabs are stripped
+            from the start of a line up to a block tag.  Defaults to `False`.
+
+        `newline_sequence`
+            The sequence that starts a newline.  Must be one of ``'\r'``,
+            ``'\n'`` or ``'\r\n'``.  The default is ``'\n'`` which is a
+            useful default for Linux and OS X systems as well as web
+            applications.
+
+        `keep_trailing_newline`
+            Preserve the trailing newline when rendering templates.
+            The default is ``False``, which causes a single newline,
+            if present, to be stripped from the end of the template.
+
+            .. versionadded:: 2.7
+
+        `extensions`
+            List of Jinja extensions to use.  This can either be import paths
+            as strings or extension classes.  For more information have a
+            look at :ref:`the extensions documentation <jinja-extensions>`.
+
+        `optimized`
+            Should the optimizer be enabled?  Defaults to ``True``.
+
+        `undefined`
+            :class:`Undefined` or a subclass of it that is used to represent
+            undefined values in the template.
+
+        `finalize`
+            A callable that can be used to process the result of a variable
+            expression before it is output.  For example one can convert
+            ``None`` implicitly into an empty string here.
+
+        `autoescape`
+            If set to ``True`` the XML/HTML autoescaping feature is enabled by
+            default.  For more details about autoescaping see
+            :class:`~markupsafe.Markup`.  As of Jinja 2.4 this can also
+            be a callable that is passed the template name and has to
+            return ``True`` or ``False`` depending on whether autoescape
+            should be enabled by default.
+
+            .. versionchanged:: 2.4
+               `autoescape` can now be a function
+
+        `loader`
+            The template loader for this environment.
+
+        `cache_size`
+            The size of the cache.  Per default this is ``400`` which means
+            that if more than 400 templates are loaded the loader will clean
+            out the least recently used template.  If the cache size is set to
+            ``0`` templates are recompiled all the time, if the cache size is
+            ``-1`` the cache will not be cleaned.
+
+            .. versionchanged:: 2.8
+               The cache size was increased to 400 from a low 50.
+
+        `auto_reload`
+            Some loaders load templates from locations where the template
+            sources may change (i.e., the file system or a database).  If
+            ``auto_reload`` is set to ``True`` (the default), the loader checks
+            whether the source changed each time a template is requested and
+            reloads it if so.  For higher performance it's possible to
+            disable that.
+
+        `bytecode_cache`
+            If set to a bytecode cache object, this object will provide a
+            cache for the internal Jinja bytecode so that templates don't
+            have to be parsed if they were not changed.
+
+            See :ref:`bytecode-cache` for more information.
+
+        `enable_async`
+            If set to ``True`` this enables async template execution,
+            which allows using async functions and generators.
+    """
+
+    #: if this environment is sandboxed.  Modifying this variable won't make
+    #: the environment sandboxed though.  For a real sandboxed environment
+    #: have a look at jinja2.sandbox.  This flag alone controls the code
+    #: generation by the compiler.
+    sandboxed = False
+
+    #: True if the environment is just an overlay
+    overlayed = False
+
+    #: the environment this environment is linked to if it is an overlay
+    linked_to: t.Optional["Environment"] = None
+
+    #: shared environments have this set to `True`.  A shared environment
+    #: must not be modified
+    shared = False
+
+    #: the class that is used for code generation.  See
+    #: :class:`~jinja2.compiler.CodeGenerator` for more information.
+    code_generator_class: t.Type["CodeGenerator"] = CodeGenerator
+
+    concat = "".join
+
+    #: the context class that is used for templates.  See
+    #: :class:`~jinja2.runtime.Context` for more information.
+    context_class: t.Type[Context] = Context
+
+    template_class: t.Type["Template"]
+
+    def __init__(
+        self,
+        block_start_string: str = BLOCK_START_STRING,
+        block_end_string: str = BLOCK_END_STRING,
+        variable_start_string: str = VARIABLE_START_STRING,
+        variable_end_string: str = VARIABLE_END_STRING,
+        comment_start_string: str = COMMENT_START_STRING,
+        comment_end_string: str = COMMENT_END_STRING,
+        line_statement_prefix: t.Optional[str] = LINE_STATEMENT_PREFIX,
+        line_comment_prefix: t.Optional[str] = LINE_COMMENT_PREFIX,
+        trim_blocks: bool = TRIM_BLOCKS,
+        lstrip_blocks: bool = LSTRIP_BLOCKS,
+        newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = NEWLINE_SEQUENCE,
+        keep_trailing_newline: bool = KEEP_TRAILING_NEWLINE,
+        extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = (),
+        optimized: bool = True,
+        undefined: t.Type[Undefined] = Undefined,
+        finalize: t.Optional[t.Callable[..., t.Any]] = None,
+        autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = False,
+        loader: t.Optional["BaseLoader"] = None,
+        cache_size: int = 400,
+        auto_reload: bool = True,
+        bytecode_cache: t.Optional["BytecodeCache"] = None,
+        enable_async: bool = False,
+    ):
+        # !!Important notice!!
+        #   The constructor accepts quite a few arguments that should be
+        #   passed by keyword rather than position.  However, it's important
+        #   not to change the order of the arguments because it is relied on
+        #   internally in at least these cases:
+        #       -   spontaneous environments (i18n extension and Template)
+        #       -   unittests
+        #   If parameter changes are required, only add parameters at the end
+        #   and don't change the order (or the defaults!) of the existing
+        #   arguments.
+
+        # lexer / parser information
+        self.block_start_string = block_start_string
+        self.block_end_string = block_end_string
+        self.variable_start_string = variable_start_string
+        self.variable_end_string = variable_end_string
+        self.comment_start_string = comment_start_string
+        self.comment_end_string = comment_end_string
+        self.line_statement_prefix = line_statement_prefix
+        self.line_comment_prefix = line_comment_prefix
+        self.trim_blocks = trim_blocks
+        self.lstrip_blocks = lstrip_blocks
+        self.newline_sequence = newline_sequence
+        self.keep_trailing_newline = keep_trailing_newline
+
+        # runtime information
+        self.undefined: t.Type[Undefined] = undefined
+        self.optimized = optimized
+        self.finalize = finalize
+        self.autoescape = autoescape
+
+        # defaults
+        self.filters = DEFAULT_FILTERS.copy()
+        self.tests = DEFAULT_TESTS.copy()
+        self.globals = DEFAULT_NAMESPACE.copy()
+
+        # set the loader provided
+        self.loader = loader
+        self.cache = create_cache(cache_size)
+        self.bytecode_cache = bytecode_cache
+        self.auto_reload = auto_reload
+
+        # configurable policies
+        self.policies = DEFAULT_POLICIES.copy()
+
+        # load extensions
+        self.extensions = load_extensions(self, extensions)
+
+        self.is_async = enable_async
+        _environment_config_check(self)
+
+    def add_extension(self, extension: t.Union[str, t.Type["Extension"]]) -> None:
+        """Adds an extension after the environment was created.
+
+        .. versionadded:: 2.5
+        """
+        self.extensions.update(load_extensions(self, [extension]))
+
+    def extend(self, **attributes: t.Any) -> None:
+        """Add the items to the instance of the environment if they do not exist
+        yet.  This is used by :ref:`extensions <writing-extensions>` to register
+        callbacks and configuration values without breaking inheritance.
+        """
+        for key, value in attributes.items():
+            if not hasattr(self, key):
+                setattr(self, key, value)
+
+    def overlay(
+        self,
+        block_start_string: str = missing,
+        block_end_string: str = missing,
+        variable_start_string: str = missing,
+        variable_end_string: str = missing,
+        comment_start_string: str = missing,
+        comment_end_string: str = missing,
+        line_statement_prefix: t.Optional[str] = missing,
+        line_comment_prefix: t.Optional[str] = missing,
+        trim_blocks: bool = missing,
+        lstrip_blocks: bool = missing,
+        newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = missing,
+        keep_trailing_newline: bool = missing,
+        extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = missing,
+        optimized: bool = missing,
+        undefined: t.Type[Undefined] = missing,
+        finalize: t.Optional[t.Callable[..., t.Any]] = missing,
+        autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = missing,
+        loader: t.Optional["BaseLoader"] = missing,
+        cache_size: int = missing,
+        auto_reload: bool = missing,
+        bytecode_cache: t.Optional["BytecodeCache"] = missing,
+        enable_async: bool = False,
+    ) -> "Environment":
+        """Create a new overlay environment that shares all the data with the
+        current environment except for cache and the overridden attributes.
+        Extensions cannot be removed for an overlayed environment.  An overlayed
+        environment automatically gets all the extensions of the environment it
+        is linked to plus optional extra extensions.
+
+        Creating overlays should happen after the initial environment has been
+        set up completely.  Not all attributes are truly linked; some are just
+        copied over, so modifications on the original environment may not shine
+        through.
+
+        .. versionchanged:: 3.1.2
+            Added the ``newline_sequence``, ``keep_trailing_newline``,
+            and ``enable_async`` parameters to match ``__init__``.
+        """
+        args = dict(locals())
+        del args["self"], args["cache_size"], args["extensions"], args["enable_async"]
+
+        rv = object.__new__(self.__class__)
+        rv.__dict__.update(self.__dict__)
+        rv.overlayed = True
+        rv.linked_to = self
+
+        for key, value in args.items():
+            if value is not missing:
+                setattr(rv, key, value)
+
+        if cache_size is not missing:
+            rv.cache = create_cache(cache_size)
+        else:
+            rv.cache = copy_cache(self.cache)
+
+        rv.extensions = {}
+        for key, value in self.extensions.items():
+            rv.extensions[key] = value.bind(rv)
+        if extensions is not missing:
+            rv.extensions.update(load_extensions(rv, extensions))
+
+        if enable_async is not missing:
+            rv.is_async = enable_async
+
+        return _environment_config_check(rv)
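+
+    # Illustrative sketch of ``overlay``: unspecified settings are shared
+    # with the original environment (``base`` here is hypothetical):
+    #
+    #     base = Environment(trim_blocks=True)
+    #     alt = base.overlay(comment_start_string="<#", comment_end_string="#>")
+    #     assert alt.trim_blocks  # inherited from ``base``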
+
+    @property
+    def lexer(self) -> Lexer:
+        """The lexer for this environment."""
+        return get_lexer(self)
+
+    def iter_extensions(self) -> t.Iterator["Extension"]:
+        """Iterates over the extensions by priority."""
+        return iter(sorted(self.extensions.values(), key=lambda x: x.priority))
+
+    def getitem(
+        self, obj: t.Any, argument: t.Union[str, t.Any]
+    ) -> t.Union[t.Any, Undefined]:
+        """Get an item or attribute of an object but prefer the item."""
+        try:
+            return obj[argument]
+        except (AttributeError, TypeError, LookupError):
+            if isinstance(argument, str):
+                try:
+                    attr = str(argument)
+                except Exception:
+                    pass
+                else:
+                    try:
+                        return getattr(obj, attr)
+                    except AttributeError:
+                        pass
+            return self.undefined(obj=obj, name=argument)
+
+    def getattr(self, obj: t.Any, attribute: str) -> t.Any:
+        """Get an item or attribute of an object but prefer the attribute.
+        Unlike :meth:`getitem` the attribute *must* be a string.
+        """
+        try:
+            return getattr(obj, attribute)
+        except AttributeError:
+            pass
+        try:
+            return obj[attribute]
+        except (TypeError, LookupError, AttributeError):
+            return self.undefined(obj=obj, name=attribute)
+
+    def _filter_test_common(
+        self,
+        name: t.Union[str, Undefined],
+        value: t.Any,
+        args: t.Optional[t.Sequence[t.Any]],
+        kwargs: t.Optional[t.Mapping[str, t.Any]],
+        context: t.Optional[Context],
+        eval_ctx: t.Optional[EvalContext],
+        is_filter: bool,
+    ) -> t.Any:
+        if is_filter:
+            env_map = self.filters
+            type_name = "filter"
+        else:
+            env_map = self.tests
+            type_name = "test"
+
+        func = env_map.get(name)  # type: ignore
+
+        if func is None:
+            msg = f"No {type_name} named {name!r}."
+
+            if isinstance(name, Undefined):
+                try:
+                    name._fail_with_undefined_error()
+                except Exception as e:
+                    msg = f"{msg} ({e}; did you forget to quote the callable name?)"
+
+            raise TemplateRuntimeError(msg)
+
+        args = [value, *(args if args is not None else ())]
+        kwargs = kwargs if kwargs is not None else {}
+        pass_arg = _PassArg.from_obj(func)
+
+        if pass_arg is _PassArg.context:
+            if context is None:
+                raise TemplateRuntimeError(
+                    f"Attempted to invoke a context {type_name} without context."
+                )
+
+            args.insert(0, context)
+        elif pass_arg is _PassArg.eval_context:
+            if eval_ctx is None:
+                if context is not None:
+                    eval_ctx = context.eval_ctx
+                else:
+                    eval_ctx = EvalContext(self)
+
+            args.insert(0, eval_ctx)
+        elif pass_arg is _PassArg.environment:
+            args.insert(0, self)
+
+        return func(*args, **kwargs)
+
+    def call_filter(
+        self,
+        name: str,
+        value: t.Any,
+        args: t.Optional[t.Sequence[t.Any]] = None,
+        kwargs: t.Optional[t.Mapping[str, t.Any]] = None,
+        context: t.Optional[Context] = None,
+        eval_ctx: t.Optional[EvalContext] = None,
+    ) -> t.Any:
+        """Invoke a filter on a value the same way the compiler does.
+
+        This might return a coroutine if the filter is running from an
+        environment in async mode and the filter supports async
+        execution. It's your responsibility to await this if needed.
+
+        .. versionadded:: 2.7
+        """
+        return self._filter_test_common(
+            name, value, args, kwargs, context, eval_ctx, True
+        )
+
+    def call_test(
+        self,
+        name: str,
+        value: t.Any,
+        args: t.Optional[t.Sequence[t.Any]] = None,
+        kwargs: t.Optional[t.Mapping[str, t.Any]] = None,
+        context: t.Optional[Context] = None,
+        eval_ctx: t.Optional[EvalContext] = None,
+    ) -> t.Any:
+        """Invoke a test on a value the same way the compiler does.
+
+        This might return a coroutine if the test is running from an
+        environment in async mode and the test supports async execution.
+        It's your responsibility to await this if needed.
+
+        .. versionchanged:: 3.0
+            Tests support ``@pass_context``, etc. decorators. Added
+            the ``context`` and ``eval_ctx`` parameters.
+
+        .. versionadded:: 2.7
+        """
+        return self._filter_test_common(
+            name, value, args, kwargs, context, eval_ctx, False
+        )
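+
+    # Illustrative sketch: invoking built-in filters and tests by name on a
+    # hypothetical environment ``env``:
+    #
+    #     env.call_filter("upper", "hello")  # -> "HELLO"
+    #     env.call_test("even", 4)           # -> True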
+
+    @internalcode
+    def parse(
+        self,
+        source: str,
+        name: t.Optional[str] = None,
+        filename: t.Optional[str] = None,
+    ) -> nodes.Template:
+        """Parse the sourcecode and return the abstract syntax tree.  This
+        tree of nodes is used by the compiler to convert the template into
+        executable source- or bytecode.  This is useful for debugging or to
+        extract information from templates.
+
+        If you are :ref:`developing Jinja extensions <writing-extensions>`
+        this gives you a good overview of the node tree generated.
+        """
+        try:
+            return self._parse(source, name, filename)
+        except TemplateSyntaxError:
+            self.handle_exception(source=source)
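+
+    # Illustrative sketch: inspecting the AST, e.g. with the helpers in
+    # :mod:`jinja2.meta` (``env`` is hypothetical):
+    #
+    #     from jinja2 import meta
+    #
+    #     ast = env.parse("Hello {{ name }}!")
+    #     meta.find_undeclared_variables(ast)  # -> {"name"}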
+
+    def _parse(
+        self, source: str, name: t.Optional[str], filename: t.Optional[str]
+    ) -> nodes.Template:
+        """Internal parsing function used by `parse` and `compile`."""
+        return Parser(self, source, name, filename).parse()
+
+    def lex(
+        self,
+        source: str,
+        name: t.Optional[str] = None,
+        filename: t.Optional[str] = None,
+    ) -> t.Iterator[t.Tuple[int, str, str]]:
+        """Lex the given sourcecode and return a generator that yields
+        tokens as tuples in the form ``(lineno, token_type, value)``.
+        This can be useful for :ref:`extension development <writing-extensions>`
+        and debugging templates.
+
+        This does not perform preprocessing.  If you want the preprocessing
+        of the extensions to be applied you have to filter source through
+        the :meth:`preprocess` method.
+        """
+        source = str(source)
+        try:
+            return self.lexer.tokeniter(source, name, filename)
+        except TemplateSyntaxError:
+            self.handle_exception(source=source)
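+
+    # Illustrative sketch of the token tuples yielded by :meth:`lex`
+    # (whitespace tokens omitted; ``env`` is hypothetical):
+    #
+    #     for lineno, token_type, value in env.lex("{{ foo }}"):
+    #         print(lineno, token_type, value)
+    #     # 1 variable_begin {{
+    #     # 1 name foo
+    #     # 1 variable_end }}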
+
+    def preprocess(
+        self,
+        source: str,
+        name: t.Optional[str] = None,
+        filename: t.Optional[str] = None,
+    ) -> str:
+        """Preprocesses the source with all extensions.  This is automatically
+        called for all parsing and compiling methods but *not* for :meth:`lex`
+        because there you usually only want the actual source tokenized.
+        """
+        return reduce(
+            lambda s, e: e.preprocess(s, name, filename),
+            self.iter_extensions(),
+            str(source),
+        )
+
+    def _tokenize(
+        self,
+        source: str,
+        name: t.Optional[str],
+        filename: t.Optional[str] = None,
+        state: t.Optional[str] = None,
+    ) -> TokenStream:
+        """Called by the parser to do the preprocessing and filtering
+        for all the extensions.  Returns a :class:`~jinja2.lexer.TokenStream`.
+        """
+        source = self.preprocess(source, name, filename)
+        stream = self.lexer.tokenize(source, name, filename, state)
+
+        for ext in self.iter_extensions():
+            stream = ext.filter_stream(stream)  # type: ignore
+
+            if not isinstance(stream, TokenStream):
+                stream = TokenStream(stream, name, filename)  # type: ignore
+
+        return stream
+
+    def _generate(
+        self,
+        source: nodes.Template,
+        name: t.Optional[str],
+        filename: t.Optional[str],
+        defer_init: bool = False,
+    ) -> str:
+        """Internal hook that can be overridden to hook a different generate
+        method in.
+
+        .. versionadded:: 2.5
+        """
+        return generate(  # type: ignore
+            source,
+            self,
+            name,
+            filename,
+            defer_init=defer_init,
+            optimized=self.optimized,
+        )
+
+    def _compile(self, source: str, filename: str) -> CodeType:
+        """Internal hook that can be overridden to hook a different compile
+        method in.
+
+        .. versionadded:: 2.5
+        """
+        return compile(source, filename, "exec")  # type: ignore
+
+    @typing.overload
+    def compile(  # type: ignore
+        self,
+        source: t.Union[str, nodes.Template],
+        name: t.Optional[str] = None,
+        filename: t.Optional[str] = None,
+        raw: "te.Literal[False]" = False,
+        defer_init: bool = False,
+    ) -> CodeType:
+        ...
+
+    @typing.overload
+    def compile(
+        self,
+        source: t.Union[str, nodes.Template],
+        name: t.Optional[str] = None,
+        filename: t.Optional[str] = None,
+        raw: "te.Literal[True]" = ...,
+        defer_init: bool = False,
+    ) -> str:
+        ...
+
+    @internalcode
+    def compile(
+        self,
+        source: t.Union[str, nodes.Template],
+        name: t.Optional[str] = None,
+        filename: t.Optional[str] = None,
+        raw: bool = False,
+        defer_init: bool = False,
+    ) -> t.Union[str, CodeType]:
+        """Compile a node or template source code.  The `name` parameter is
+        the load name of the template after it was joined using
+        :meth:`join_path` if necessary, not the filename on the file system.
+        The `filename` parameter is the estimated filename of the template on
+        the file system.  If the template came from a database or memory this
+        can be omitted.
+
+        The return value of this method is a Python code object.  If the `raw`
+        parameter is `True` the return value will be a string with Python
+        code equivalent to the bytecode returned otherwise.  This method is
+        mainly used internally.
+
+        `defer_init` is used internally to aid the module code generator.  It
+        causes the generated code to be importable without the global
+        environment variable being set.
+
+        .. versionadded:: 2.4
+           `defer_init` parameter added.
+        """
+        source_hint = None
+        try:
+            if isinstance(source, str):
+                source_hint = source
+                source = self._parse(source, name, filename)
+            source = self._generate(source, name, filename, defer_init=defer_init)
+            if raw:
+                return source
+            if filename is None:
+                filename = "<template>"
+            return self._compile(source, filename)
+        except TemplateSyntaxError:
+            self.handle_exception(source=source_hint)
+
+    def compile_expression(
+        self, source: str, undefined_to_none: bool = True
+    ) -> "TemplateExpression":
+        """A handy helper method that returns a callable that accepts keyword
+        arguments that appear as variables in the expression.  If called it
+        returns the result of the expression.
+
+        This is useful if applications want to use the same rules as Jinja
+        in template "configuration files" or similar situations.
+
+        Example usage:
+
+        >>> env = Environment()
+        >>> expr = env.compile_expression('foo == 42')
+        >>> expr(foo=23)
+        False
+        >>> expr(foo=42)
+        True
+
+        By default the return value is converted to `None` if the
+        expression returns an undefined value.  This can be changed
+        by setting `undefined_to_none` to `False`.
+
+        >>> env.compile_expression('var')() is None
+        True
+        >>> env.compile_expression('var', undefined_to_none=False)()
+        Undefined
+
+        .. versionadded:: 2.1
+        """
+        parser = Parser(self, source, state="variable")
+        try:
+            expr = parser.parse_expression()
+            if not parser.stream.eos:
+                raise TemplateSyntaxError(
+                    "chunk after expression", parser.stream.current.lineno, None, None
+                )
+            expr.set_environment(self)
+        except TemplateSyntaxError:
+            self.handle_exception(source=source)
+
+        body = [nodes.Assign(nodes.Name("result", "store"), expr, lineno=1)]
+        template = self.from_string(nodes.Template(body, lineno=1))
+        return TemplateExpression(template, undefined_to_none)
+
+    def compile_templates(
+        self,
+        target: t.Union[str, os.PathLike],
+        extensions: t.Optional[t.Collection[str]] = None,
+        filter_func: t.Optional[t.Callable[[str], bool]] = None,
+        zip: t.Optional[str] = "deflated",
+        log_function: t.Optional[t.Callable[[str], None]] = None,
+        ignore_errors: bool = True,
+    ) -> None:
+        """Finds all the templates the loader can find, compiles them
+        and stores them in `target`.  If `zip` is `None`, instead of in a
+        zipfile, the templates will be stored in a directory.
+        By default a deflate zip algorithm is used. To switch to
+        the stored algorithm, `zip` can be set to ``'stored'``.
+
+        `extensions` and `filter_func` are passed to :meth:`list_templates`.
+        Each template returned will be compiled to the target folder or
+        zipfile.
+
+        By default template compilation errors are ignored.  In case a
+        log function is provided, errors are logged.  If you want template
+        syntax errors to abort the compilation you can set `ignore_errors`
+        to `False` and you will get an exception on syntax errors.
+
+        .. versionadded:: 2.4
+        """
+        from .loaders import ModuleLoader
+
+        if log_function is None:
+
+            def log_function(x: str) -> None:
+                pass
+
+        assert log_function is not None
+        assert self.loader is not None, "No loader configured."
+
+        def write_file(filename: str, data: str) -> None:
+            if zip:
+                info = ZipInfo(filename)
+                info.external_attr = 0o755 << 16
+                zip_file.writestr(info, data)
+            else:
+                with open(os.path.join(target, filename), "wb") as f:
+                    f.write(data.encode("utf8"))
+
+        if zip is not None:
+            from zipfile import ZipFile, ZipInfo, ZIP_DEFLATED, ZIP_STORED
+
+            zip_file = ZipFile(
+                target, "w", dict(deflated=ZIP_DEFLATED, stored=ZIP_STORED)[zip]
+            )
+            log_function(f"Compiling into Zip archive {target!r}")
+        else:
+            if not os.path.isdir(target):
+                os.makedirs(target)
+            log_function(f"Compiling into folder {target!r}")
+
+        try:
+            for name in self.list_templates(extensions, filter_func):
+                source, filename, _ = self.loader.get_source(self, name)
+                try:
+                    code = self.compile(source, name, filename, True, True)
+                except TemplateSyntaxError as e:
+                    if not ignore_errors:
+                        raise
+                    log_function(f'Could not compile "{name}": {e}')
+                    continue
+
+                filename = ModuleLoader.get_module_filename(name)
+
+                write_file(filename, code)
+                log_function(f'Compiled "{name}" as {filename}')
+        finally:
+            if zip:
+                zip_file.close()
+
+        log_function("Finished compiling templates")
+
+    def list_templates(
+        self,
+        extensions: t.Optional[t.Collection[str]] = None,
+        filter_func: t.Optional[t.Callable[[str], bool]] = None,
+    ) -> t.List[str]:
+        """Returns a list of templates for this environment.  This requires
+        that the loader supports the :meth:`~BaseLoader.list_templates`
+        method.
+
+        If there are other files in the template folder besides the
+        actual templates, the returned list can be filtered.  There are two
+        ways: either `extensions` is set to a list of file extensions for
+        templates, or a `filter_func` can be provided which is a callable that
+        is passed a template name and should return `True` if it should end up
+        in the result list.
+
+        If the loader does not support that, a :exc:`TypeError` is raised.
+
+        .. versionadded:: 2.4
+        """
+        assert self.loader is not None, "No loader configured."
+        names = self.loader.list_templates()
+
+        if extensions is not None:
+            if filter_func is not None:
+                raise TypeError(
+                    "either extensions or filter_func can be passed, but not both"
+                )
+
+            def filter_func(x: str) -> bool:
+                return "." in x and x.rsplit(".", 1)[1] in extensions  # type: ignore
+
+        if filter_func is not None:
+            names = [name for name in names if filter_func(name)]
+
+        return names
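+
+    # Illustrative sketch of the two filtering modes (mutually exclusive;
+    # ``env`` is hypothetical):
+    #
+    #     env.list_templates(extensions=["html", "txt"])
+    #     env.list_templates(filter_func=lambda n: n.startswith("emails/"))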
+
+    def handle_exception(self, source: t.Optional[str] = None) -> "te.NoReturn":
+        """Exception handling helper.  This is used internally to either raise
+        rewritten exceptions or return a rendered traceback for the template.
+        """
+        from .debug import rewrite_traceback_stack
+
+        raise rewrite_traceback_stack(source=source)
+
+    def join_path(self, template: str, parent: str) -> str:
+        """Join a template with the parent.  By default all the lookups are
+        relative to the loader root so this method returns the `template`
+        parameter unchanged, but if the paths should be relative to the
+        parent template, this function can be used to calculate the real
+        template name.
+
+        Subclasses may override this method and implement template path
+        joining here.
+        """
+        return template
+
+    @internalcode
+    def _load_template(
+        self, name: str, globals: t.Optional[t.MutableMapping[str, t.Any]]
+    ) -> "Template":
+        if self.loader is None:
+            raise TypeError("no loader for this environment specified")
+        cache_key = (weakref.ref(self.loader), name)
+        if self.cache is not None:
+            template = self.cache.get(cache_key)
+            if template is not None and (
+                not self.auto_reload or template.is_up_to_date
+            ):
+                # template.globals is a ChainMap, modifying it will only
+                # affect the template, not the environment globals.
+                if globals:
+                    template.globals.update(globals)
+
+                return template
+
+        template = self.loader.load(self, name, self.make_globals(globals))
+
+        if self.cache is not None:
+            self.cache[cache_key] = template
+        return template
+
+    @internalcode
+    def get_template(
+        self,
+        name: t.Union[str, "Template"],
+        parent: t.Optional[str] = None,
+        globals: t.Optional[t.MutableMapping[str, t.Any]] = None,
+    ) -> "Template":
+        """Load a template by name with :attr:`loader` and return a
+        :class:`Template`. If the template does not exist a
+        :exc:`TemplateNotFound` exception is raised.
+
+        :param name: Name of the template to load. When loading
+            templates from the filesystem, "/" is used as the path
+            separator, even on Windows.
+        :param parent: The name of the parent template importing this
+            template. :meth:`join_path` can be used to implement name
+            transformations with this.
+        :param globals: Extend the environment :attr:`globals` with
+            these extra variables available for all renders of this
+            template. If the template has already been loaded and
+            cached, its globals are updated with any new items.
+
+        .. versionchanged:: 3.0
+            If a template is loaded from cache, ``globals`` will update
+            the template's globals instead of ignoring the new values.
+
+        .. versionchanged:: 2.4
+            If ``name`` is a :class:`Template` object it is returned
+            unchanged.
+        """
+        if isinstance(name, Template):
+            return name
+        if parent is not None:
+            name = self.join_path(name, parent)
+
+        return self._load_template(name, globals)
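+
+    # Illustrative sketch, assuming the loader can resolve "index.html":
+    #
+    #     tmpl = env.get_template("index.html", globals={"site": "example"})
+    #     tmpl.render()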
+
+    @internalcode
+    def select_template(
+        self,
+        names: t.Iterable[t.Union[str, "Template"]],
+        parent: t.Optional[str] = None,
+        globals: t.Optional[t.MutableMapping[str, t.Any]] = None,
+    ) -> "Template":
+        """Like :meth:`get_template`, but tries loading multiple names.
+        If none of the names can be loaded a :exc:`TemplatesNotFound`
+        exception is raised.
+
+        :param names: List of template names to try loading in order.
+        :param parent: The name of the parent template importing this
+            template. :meth:`join_path` can be used to implement name
+            transformations with this.
+        :param globals: Extend the environment :attr:`globals` with
+            these extra variables available for all renders of this
+            template. If the template has already been loaded and
+            cached, its globals are updated with any new items.
+
+        .. versionchanged:: 3.0
+            If a template is loaded from cache, ``globals`` will update
+            the template's globals instead of ignoring the new values.
+
+        .. versionchanged:: 2.11
+            If ``names`` is :class:`Undefined`, an :exc:`UndefinedError`
+            is raised instead. If no templates were found and ``names``
+            contains :class:`Undefined`, the message is more helpful.
+
+        .. versionchanged:: 2.4
+            If ``names`` contains a :class:`Template` object it is
+            returned unchanged.
+
+        .. versionadded:: 2.3
+        """
+        if isinstance(names, Undefined):
+            names._fail_with_undefined_error()
+
+        if not names:
+            raise TemplatesNotFound(
+                message="Tried to select from an empty list of templates."
+            )
+
+        for name in names:
+            if isinstance(name, Template):
+                return name
+            if parent is not None:
+                name = self.join_path(name, parent)
+            try:
+                return self._load_template(name, globals)
+            except (TemplateNotFound, UndefinedError):
+                pass
+        raise TemplatesNotFound(names)  # type: ignore
+
+    @internalcode
+    def get_or_select_template(
+        self,
+        template_name_or_list: t.Union[
+            str, "Template", t.List[t.Union[str, "Template"]]
+        ],
+        parent: t.Optional[str] = None,
+        globals: t.Optional[t.MutableMapping[str, t.Any]] = None,
+    ) -> "Template":
+        """Use :meth:`select_template` if an iterable of template names
+        is given, or :meth:`get_template` if one name is given.
+
+        .. versionadded:: 2.3
+        """
+        if isinstance(template_name_or_list, (str, Undefined)):
+            return self.get_template(template_name_or_list, parent, globals)
+        elif isinstance(template_name_or_list, Template):
+            return template_name_or_list
+        return self.select_template(template_name_or_list, parent, globals)
+
+    def from_string(
+        self,
+        source: t.Union[str, nodes.Template],
+        globals: t.Optional[t.MutableMapping[str, t.Any]] = None,
+        template_class: t.Optional[t.Type["Template"]] = None,
+    ) -> "Template":
+        """Load a template from a source string without using
+        :attr:`loader`.
+
+        :param source: Jinja source to compile into a template.
+        :param globals: Extend the environment :attr:`globals` with
+            these extra variables available for all renders of this
+            template. If the template has already been loaded and
+            cached, its globals are updated with any new items.
+        :param template_class: Return an instance of this
+            :class:`Template` class.
+        """
+        gs = self.make_globals(globals)
+        cls = template_class or self.template_class
+        return cls.from_code(self, self.compile(source), gs, None)
+
+    def make_globals(
+        self, d: t.Optional[t.MutableMapping[str, t.Any]]
+    ) -> t.MutableMapping[str, t.Any]:
+        """Make the globals map for a template. Any given template
+        globals overlay the environment :attr:`globals`.
+
+        Returns a :class:`collections.ChainMap`. This allows any changes
+        to a template's globals to only affect that template, while
+        changes to the environment's globals are still reflected.
+        However, avoid modifying any globals after a template is loaded.
+
+        :param d: Dict of template-specific globals.
+
+        .. versionchanged:: 3.0
+            Use :class:`collections.ChainMap` to always prevent mutating
+            environment globals.
+        """
+        if d is None:
+            d = {}
+
+        return ChainMap(d, self.globals)
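+
+    # Illustrative sketch of the ChainMap layering described above
+    # (``env`` is hypothetical):
+    #
+    #     env.globals["who"] = "world"
+    #     g = env.make_globals({"who": "template"})
+    #     g["who"]          # -> "template" (template globals win)
+    #     g.parents["who"]  # -> "world" (environment globals remain visible)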
+
+
+class Template:
+    """A compiled template that can be rendered.
+
+    Use the methods on :class:`Environment` to create or load templates.
+    The environment is used to configure how templates are compiled and
+    behave.
+
+    It is also possible to create a template object directly. This is
+    not usually recommended. The constructor takes most of the same
+    arguments as :class:`Environment`. All templates created with the
+    same environment arguments share the same ephemeral ``Environment``
+    instance behind the scenes.
+
+    A template object should be considered immutable. Modifications on
+    the object are not supported.
+    """
+
+    #: Type of environment to create when creating a template directly
+    #: rather than through an existing environment.
+    environment_class: t.Type[Environment] = Environment
+
+    environment: Environment
+    globals: t.MutableMapping[str, t.Any]
+    name: t.Optional[str]
+    filename: t.Optional[str]
+    blocks: t.Dict[str, t.Callable[[Context], t.Iterator[str]]]
+    root_render_func: t.Callable[[Context], t.Iterator[str]]
+    _module: t.Optional["TemplateModule"]
+    _debug_info: str
+    _uptodate: t.Optional[t.Callable[[], bool]]
+
+    def __new__(
+        cls,
+        source: t.Union[str, nodes.Template],
+        block_start_string: str = BLOCK_START_STRING,
+        block_end_string: str = BLOCK_END_STRING,
+        variable_start_string: str = VARIABLE_START_STRING,
+        variable_end_string: str = VARIABLE_END_STRING,
+        comment_start_string: str = COMMENT_START_STRING,
+        comment_end_string: str = COMMENT_END_STRING,
+        line_statement_prefix: t.Optional[str] = LINE_STATEMENT_PREFIX,
+        line_comment_prefix: t.Optional[str] = LINE_COMMENT_PREFIX,
+        trim_blocks: bool = TRIM_BLOCKS,
+        lstrip_blocks: bool = LSTRIP_BLOCKS,
+        newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = NEWLINE_SEQUENCE,
+        keep_trailing_newline: bool = KEEP_TRAILING_NEWLINE,
+        extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = (),
+        optimized: bool = True,
+        undefined: t.Type[Undefined] = Undefined,
+        finalize: t.Optional[t.Callable[..., t.Any]] = None,
+        autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = False,
+        enable_async: bool = False,
+    ) -> t.Any:  # it returns a `Template`, but this breaks the sphinx build...
+        env = get_spontaneous_environment(
+            cls.environment_class,  # type: ignore
+            block_start_string,
+            block_end_string,
+            variable_start_string,
+            variable_end_string,
+            comment_start_string,
+            comment_end_string,
+            line_statement_prefix,
+            line_comment_prefix,
+            trim_blocks,
+            lstrip_blocks,
+            newline_sequence,
+            keep_trailing_newline,
+            frozenset(extensions),
+            optimized,
+            undefined,  # type: ignore
+            finalize,
+            autoescape,
+            None,
+            0,
+            False,
+            None,
+            enable_async,
+        )
+        return env.from_string(source, template_class=cls)
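+
+    # Illustrative sketch: constructing a template directly, without an
+    # explicit environment:
+    #
+    #     Template("Hello {{ name }}!").render(name="World")  # -> "Hello World!"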
+
+    @classmethod
+    def from_code(
+        cls,
+        environment: Environment,
+        code: CodeType,
+        globals: t.MutableMapping[str, t.Any],
+        uptodate: t.Optional[t.Callable[[], bool]] = None,
+    ) -> "Template":
+        """Creates a template object from compiled code and the globals.  This
+        is used by the loaders and environment to create a template object.
+        """
+        namespace = {"environment": environment, "__file__": code.co_filename}
+        exec(code, namespace)
+        rv = cls._from_namespace(environment, namespace, globals)
+        rv._uptodate = uptodate
+        return rv
+
+    @classmethod
+    def from_module_dict(
+        cls,
+        environment: Environment,
+        module_dict: t.MutableMapping[str, t.Any],
+        globals: t.MutableMapping[str, t.Any],
+    ) -> "Template":
+        """Creates a template object from a module.  This is used by the
+        module loader to create a template object.
+
+        .. versionadded:: 2.4
+        """
+        return cls._from_namespace(environment, module_dict, globals)
+
+    @classmethod
+    def _from_namespace(
+        cls,
+        environment: Environment,
+        namespace: t.MutableMapping[str, t.Any],
+        globals: t.MutableMapping[str, t.Any],
+    ) -> "Template":
+        t: "Template" = object.__new__(cls)
+        t.environment = environment
+        t.globals = globals
+        t.name = namespace["name"]
+        t.filename = namespace["__file__"]
+        t.blocks = namespace["blocks"]
+
+        # render function and module
+        t.root_render_func = namespace["root"]  # type: ignore
+        t._module = None
+
+        # debug and loader helpers
+        t._debug_info = namespace["debug_info"]
+        t._uptodate = None
+
+        # store the reference
+        namespace["environment"] = environment
+        namespace["__jinja_template__"] = t
+
+        return t
+
+    def render(self, *args: t.Any, **kwargs: t.Any) -> str:
+        """This method accepts the same arguments as the `dict` constructor:
+        A dict, a dict subclass or some keyword arguments.  If no arguments
+        are given the context will be empty.  These two calls do the same::
+
+            template.render(knights='that say nih')
+            template.render({'knights': 'that say nih'})
+
+        This will return the rendered template as a string.
+        """
+        if self.environment.is_async:
+            import asyncio
+
+            close = False
+
+            try:
+                loop = asyncio.get_running_loop()
+            except RuntimeError:
+                loop = asyncio.new_event_loop()
+                close = True
+
+            try:
+                return loop.run_until_complete(self.render_async(*args, **kwargs))
+            finally:
+                if close:
+                    loop.close()
+
+        ctx = self.new_context(dict(*args, **kwargs))
+
+        try:
+            return self.environment.concat(self.root_render_func(ctx))  # type: ignore
+        except Exception:
+            self.environment.handle_exception()
+
+    async def render_async(self, *args: t.Any, **kwargs: t.Any) -> str:
+        """This works similar to :meth:`render` but returns a coroutine
+        that when awaited returns the entire rendered template string.  This
+        requires the async feature to be enabled.
+
+        Example usage::
+
+            await template.render_async(knights='that say nih; asynchronously')
+        """
+        if not self.environment.is_async:
+            raise RuntimeError(
+                "The environment was not created with async mode enabled."
+            )
+
+        ctx = self.new_context(dict(*args, **kwargs))
+
+        try:
+            return self.environment.concat(  # type: ignore
+                [n async for n in self.root_render_func(ctx)]  # type: ignore
+            )
+        except Exception:
+            return self.environment.handle_exception()
+
+    def stream(self, *args: t.Any, **kwargs: t.Any) -> "TemplateStream":
+        """Works exactly like :meth:`generate` but returns a
+        :class:`TemplateStream`.
+        """
+        return TemplateStream(self.generate(*args, **kwargs))
+
+    def generate(self, *args: t.Any, **kwargs: t.Any) -> t.Iterator[str]:
+        """For very large templates it can be useful to not render the whole
+        template at once but evaluate each statement after another and yield
+        piece for piece.  This method basically does exactly that and returns
+        a generator that yields one item after another as strings.
+
+        It accepts the same arguments as :meth:`render`.
+        """
+        if self.environment.is_async:
+            import asyncio
+
+            async def to_list() -> t.List[str]:
+                return [x async for x in self.generate_async(*args, **kwargs)]
+
+            yield from asyncio.run(to_list())
+            return
+
+        ctx = self.new_context(dict(*args, **kwargs))
+
+        try:
+            yield from self.root_render_func(ctx)  # type: ignore
+        except Exception:
+            yield self.environment.handle_exception()
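+
+    # Illustrative sketch: consuming the output incrementally
+    # (``tmpl`` and ``sink`` are hypothetical):
+    #
+    #     for chunk in tmpl.generate(items=range(3)):
+    #         sink.write(chunk)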
+
+    async def generate_async(
+        self, *args: t.Any, **kwargs: t.Any
+    ) -> t.AsyncIterator[str]:
+        """An async version of :meth:`generate`.  Works very similarly but
+        returns an async iterator instead.
+        """
+        if not self.environment.is_async:
+            raise RuntimeError(
+                "The environment was not created with async mode enabled."
+            )
+
+        ctx = self.new_context(dict(*args, **kwargs))
+
+        try:
+            async for event in self.root_render_func(ctx):  # type: ignore
+                yield event
+        except Exception:
+            yield self.environment.handle_exception()
+
+    def new_context(
+        self,
+        vars: t.Optional[t.Dict[str, t.Any]] = None,
+        shared: bool = False,
+        locals: t.Optional[t.Mapping[str, t.Any]] = None,
+    ) -> Context:
+        """Create a new :class:`Context` for this template.  The vars
+        provided will be passed to the template.  By default the globals
+        are added to the context.  If shared is set to `True` the data
+        is passed as is to the context without adding the globals.
+
+        `locals` can be a dict of local variables for internal usage.
+        """
+        return new_context(
+            self.environment, self.name, self.blocks, vars, shared, self.globals, locals
+        )
+
+    def make_module(
+        self,
+        vars: t.Optional[t.Dict[str, t.Any]] = None,
+        shared: bool = False,
+        locals: t.Optional[t.Mapping[str, t.Any]] = None,
+    ) -> "TemplateModule":
+        """This method works like the :attr:`module` attribute when called
+        without arguments but it will evaluate the template on every call
+        rather than caching it.  It's also possible to provide
+        a dict which is then used as context.  The arguments are the same
+        as for the :meth:`new_context` method.
+        """
+        ctx = self.new_context(vars, shared, locals)
+        return TemplateModule(self, ctx)
+
+    async def make_module_async(
+        self,
+        vars: t.Optional[t.Dict[str, t.Any]] = None,
+        shared: bool = False,
+        locals: t.Optional[t.Mapping[str, t.Any]] = None,
+    ) -> "TemplateModule":
+        """As template module creation can invoke template code for
+        asynchronous executions this method must be used instead of the
+        normal :meth:`make_module` one.  Likewise the module attribute
+        becomes unavailable in async mode.
+        """
+        ctx = self.new_context(vars, shared, locals)
+        return TemplateModule(
+            self, ctx, [x async for x in self.root_render_func(ctx)]  # type: ignore
+        )
+
+    @internalcode
+    def _get_default_module(self, ctx: t.Optional[Context] = None) -> "TemplateModule":
+        """If a context is passed in, this means that the template was
+        imported. Imported templates have access to the current
+        template's globals by default, but they can only be accessed via
+        the context during runtime.
+
+        If there are new globals, we need to create a new module because
+        the cached module is already rendered and will not have access
+        to globals from the current context. This new module is not
+        cached because the template can be imported elsewhere, and it
+        should have access to only the current template's globals.
+        """
+        if self.environment.is_async:
+            raise RuntimeError("Module is not available in async mode.")
+
+        if ctx is not None:
+            keys = ctx.globals_keys - self.globals.keys()
+
+            if keys:
+                return self.make_module({k: ctx.parent[k] for k in keys})
+
+        if self._module is None:
+            self._module = self.make_module()
+
+        return self._module
+
+    async def _get_default_module_async(
+        self, ctx: t.Optional[Context] = None
+    ) -> "TemplateModule":
+        if ctx is not None:
+            keys = ctx.globals_keys - self.globals.keys()
+
+            if keys:
+                return await self.make_module_async({k: ctx.parent[k] for k in keys})
+
+        if self._module is None:
+            self._module = await self.make_module_async()
+
+        return self._module
+
+    @property
+    def module(self) -> "TemplateModule":
+        """The template as module.  This is used for imports in the
+        template runtime but is also useful if one wants to access
+        exported template variables from the Python layer:
+
+        >>> t = Template('{% macro foo() %}42{% endmacro %}23')
+        >>> str(t.module)
+        '23'
+        >>> t.module.foo() == u'42'
+        True
+
+        This attribute is not available if async mode is enabled.
+        """
+        return self._get_default_module()
+
+    def get_corresponding_lineno(self, lineno: int) -> int:
+        """Return the source line number of a line number in the
+        generated bytecode as they are not in sync.
+        """
+        for template_line, code_line in reversed(self.debug_info):
+            if code_line <= lineno:
+                return template_line
+        return 1
+
+    @property
+    def is_up_to_date(self) -> bool:
+        """If this variable is `False` there is a newer version available."""
+        if self._uptodate is None:
+            return True
+        return self._uptodate()
+
+    @property
+    def debug_info(self) -> t.List[t.Tuple[int, int]]:
+        """The debug info mapping."""
+        if self._debug_info:
+            return [
+                tuple(map(int, x.split("=")))  # type: ignore
+                for x in self._debug_info.split("&")
+            ]
+
+        return []
+
+    def __repr__(self) -> str:
+        if self.name is None:
+            name = f"memory:{id(self):x}"
+        else:
+            name = repr(self.name)
+        return f"<{type(self).__name__} {name}>"
+
+
+class TemplateModule:
+    """Represents an imported template.  All the exported names of the
+    template are available as attributes on this object.  Additionally
+    converting it into a string renders the contents.
+    """
+
+    def __init__(
+        self,
+        template: Template,
+        context: Context,
+        body_stream: t.Optional[t.Iterable[str]] = None,
+    ) -> None:
+        if body_stream is None:
+            if context.environment.is_async:
+                raise RuntimeError(
+                    "Async mode requires a body stream to be passed to"
+                    " a template module. Use the async methods of the"
+                    " API you are using."
+                )
+
+            body_stream = list(template.root_render_func(context))  # type: ignore
+
+        self._body_stream = body_stream
+        self.__dict__.update(context.get_exported())
+        self.__name__ = template.name
+
+    def __html__(self) -> Markup:
+        return Markup(concat(self._body_stream))
+
+    def __str__(self) -> str:
+        return concat(self._body_stream)
+
+    def __repr__(self) -> str:
+        if self.__name__ is None:
+            name = f"memory:{id(self):x}"
+        else:
+            name = repr(self.__name__)
+        return f"<{type(self).__name__} {name}>"
+
+
+class TemplateExpression:
+    """The :meth:`jinja2.Environment.compile_expression` method returns an
+    instance of this object.  It wraps a template built from a single
+    expression and provides callable, expression-like access to it.
+    """
+
+    def __init__(self, template: Template, undefined_to_none: bool) -> None:
+        self._template = template
+        self._undefined_to_none = undefined_to_none
+
+    def __call__(self, *args: t.Any, **kwargs: t.Any) -> t.Optional[t.Any]:
+        context = self._template.new_context(dict(*args, **kwargs))
+        consume(self._template.root_render_func(context))  # type: ignore
+        rv = context.vars["result"]
+        if self._undefined_to_none and isinstance(rv, Undefined):
+            rv = None
+        return rv
+
+
+class TemplateStream:
+    """A template stream works pretty much like an ordinary python generator
+    but it can buffer multiple items to reduce the number of total iterations.
+    Per default the output is unbuffered which means that for every unbuffered
+    instruction in the template one string is yielded.
+
+    If buffering is enabled with a buffer size of 5, five items are combined
+    into a new string.  This is mainly useful if you are streaming
+    big templates to a client via WSGI which flushes after each iteration.
+    """
+
+    def __init__(self, gen: t.Iterator[str]) -> None:
+        self._gen = gen
+        self.disable_buffering()
+
+    def dump(
+        self,
+        fp: t.Union[str, t.IO],
+        encoding: t.Optional[str] = None,
+        errors: t.Optional[str] = "strict",
+    ) -> None:
+        """Dump the complete stream into a file or file-like object.
+        By default strings are written; if you want to encode
+        before writing, specify an `encoding`.
+
+        Example usage::
+
+            Template('Hello {{ name }}!').stream(name='foo').dump('hello.html')
+        """
+        close = False
+
+        if isinstance(fp, str):
+            if encoding is None:
+                encoding = "utf-8"
+
+            fp = open(fp, "wb")
+            close = True
+        try:
+            if encoding is not None:
+                iterable = (x.encode(encoding, errors) for x in self)  # type: ignore
+            else:
+                iterable = self  # type: ignore
+
+            if hasattr(fp, "writelines"):
+                fp.writelines(iterable)
+            else:
+                for item in iterable:
+                    fp.write(item)
+        finally:
+            if close:
+                fp.close()
+
+    def disable_buffering(self) -> None:
+        """Disable the output buffering."""
+        self._next = partial(next, self._gen)
+        self.buffered = False
+
+    def _buffered_generator(self, size: int) -> t.Iterator[str]:
+        buf: t.List[str] = []
+        c_size = 0
+        push = buf.append
+
+        while True:
+            try:
+                while c_size < size:
+                    c = next(self._gen)
+                    push(c)
+                    if c:
+                        c_size += 1
+            except StopIteration:
+                if not c_size:
+                    return
+            yield concat(buf)
+            del buf[:]
+            c_size = 0
+
+    def enable_buffering(self, size: int = 5) -> None:
+        """Enable buffering.  Buffer `size` items before yielding them."""
+        if size <= 1:
+            raise ValueError("buffer size too small")
+
+        self.buffered = True
+        self._next = partial(next, self._buffered_generator(size))
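+
+    # Illustrative sketch: buffer five output pieces per yielded string
+    # before dumping to a file (``tmpl`` is hypothetical):
+    #
+    #     stream = tmpl.stream(rows=range(100))
+    #     stream.enable_buffering(5)
+    #     stream.dump("out.html")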
+
+    def __iter__(self) -> "TemplateStream":
+        return self
+
+    def __next__(self) -> str:
+        return self._next()  # type: ignore
+
+
+# hook in default template class.  if anyone reads this comment: ignore that
+# it's possible to use custom templates ;-)
+Environment.template_class = Template
diff --git a/venv/lib/python3.9/site-packages/jinja2/exceptions.py b/venv/lib/python3.9/site-packages/jinja2/exceptions.py
new file mode 100644
index 0000000..082ebe8
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/exceptions.py
@@ -0,0 +1,166 @@
+import typing as t
+
+if t.TYPE_CHECKING:
+    from .runtime import Undefined
+
+
+class TemplateError(Exception):
+    """Baseclass for all template errors."""
+
+    def __init__(self, message: t.Optional[str] = None) -> None:
+        super().__init__(message)
+
+    @property
+    def message(self) -> t.Optional[str]:
+        return self.args[0] if self.args else None
+
+
+class TemplateNotFound(IOError, LookupError, TemplateError):
+    """Raised if a template does not exist.
+
+    .. versionchanged:: 2.11
+        If the given name is :class:`Undefined` and no message was
+        provided, an :exc:`UndefinedError` is raised.
+    """
+
+    # Silence the Python warning about message being deprecated since
+    # it's not valid here.
+    message: t.Optional[str] = None
+
+    def __init__(
+        self,
+        name: t.Optional[t.Union[str, "Undefined"]],
+        message: t.Optional[str] = None,
+    ) -> None:
+        IOError.__init__(self, name)
+
+        if message is None:
+            from .runtime import Undefined
+
+            if isinstance(name, Undefined):
+                name._fail_with_undefined_error()
+
+            message = name
+
+        self.message = message
+        self.name = name
+        self.templates = [name]
+
+    def __str__(self) -> str:
+        return str(self.message)
+
+
+class TemplatesNotFound(TemplateNotFound):
+    """Like :class:`TemplateNotFound` but raised if multiple templates
+    are selected.  This is a subclass of the :class:`TemplateNotFound`
+    exception, so just catching the base exception will catch both.
+
+    .. versionchanged:: 2.11
+        If a name in the list of names is :class:`Undefined`, a message
+        about it being undefined is shown rather than the empty string.
+
+    .. versionadded:: 2.2
+    """
+
+    def __init__(
+        self,
+        names: t.Sequence[t.Union[str, "Undefined"]] = (),
+        message: t.Optional[str] = None,
+    ) -> None:
+        if message is None:
+            from .runtime import Undefined
+
+            parts = []
+
+            for name in names:
+                if isinstance(name, Undefined):
+                    parts.append(name._undefined_message)
+                else:
+                    parts.append(name)
+
+            parts_str = ", ".join(map(str, parts))
+            message = f"none of the templates given were found: {parts_str}"
+
+        super().__init__(names[-1] if names else None, message)
+        self.templates = list(names)
+
+
+class TemplateSyntaxError(TemplateError):
+    """Raised to tell the user that there is a problem with the template."""
+
+    def __init__(
+        self,
+        message: str,
+        lineno: int,
+        name: t.Optional[str] = None,
+        filename: t.Optional[str] = None,
+    ) -> None:
+        super().__init__(message)
+        self.lineno = lineno
+        self.name = name
+        self.filename = filename
+        self.source: t.Optional[str] = None
+
+        # this is set to True if the debug.translate_syntax_error
+        # function translated the syntax error into a new traceback
+        self.translated = False
+
+    def __str__(self) -> str:
+        # for translated errors we only return the message
+        if self.translated:
+            return t.cast(str, self.message)
+
+        # otherwise attach some stuff
+        location = f"line {self.lineno}"
+        name = self.filename or self.name
+        if name:
+            location = f'File "{name}", {location}'
+        lines = [t.cast(str, self.message), "  " + location]
+
+        # if the source is set, add the line to the output
+        if self.source is not None:
+            try:
+                line = self.source.splitlines()[self.lineno - 1]
+            except IndexError:
+                pass
+            else:
+                lines.append("    " + line.strip())
+
+        return "\n".join(lines)
+
+    def __reduce__(self):  # type: ignore
+        # https://bugs.python.org/issue1692335 Exceptions that take
+        # multiple required arguments have problems with pickling.
+        # Without this, raises TypeError: __init__() missing 1 required
+        # positional argument: 'lineno'
+        return self.__class__, (self.message, self.lineno, self.name, self.filename)
+
+
+class TemplateAssertionError(TemplateSyntaxError):
+    """Like a template syntax error, but covers cases where something in the
+    template caused an error at compile time that wasn't necessarily caused
+    by a syntax error.  However it's a direct subclass of
+    :exc:`TemplateSyntaxError` and has the same attributes.
+    """
+
+
+class TemplateRuntimeError(TemplateError):
+    """A generic runtime error in the template engine.  Under some situations
+    Jinja may raise this exception.
+    """
+
+
+class UndefinedError(TemplateRuntimeError):
+    """Raised if a template tries to operate on :class:`Undefined`."""
+
+
+class SecurityError(TemplateRuntimeError):
+    """Raised if a template tries to do something insecure if the
+    sandbox is enabled.
+    """
+
+
+class FilterArgumentError(TemplateRuntimeError):
+    """This error is raised if a filter was called with inappropriate
+    arguments.
+    """
diff --git a/venv/lib/python3.9/site-packages/jinja2/ext.py b/venv/lib/python3.9/site-packages/jinja2/ext.py
new file mode 100644
index 0000000..d555054
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/ext.py
@@ -0,0 +1,859 @@
+"""Extension API for adding custom tags and behavior."""
+import pprint
+import re
+import typing as t
+
+from markupsafe import Markup
+
+from . import defaults
+from . import nodes
+from .environment import Environment
+from .exceptions import TemplateAssertionError
+from .exceptions import TemplateSyntaxError
+from .runtime import concat  # type: ignore
+from .runtime import Context
+from .runtime import Undefined
+from .utils import import_string
+from .utils import pass_context
+
+if t.TYPE_CHECKING:
+    import typing_extensions as te
+    from .lexer import Token
+    from .lexer import TokenStream
+    from .parser import Parser
+
+    class _TranslationsBasic(te.Protocol):
+        def gettext(self, message: str) -> str:
+            ...
+
+        def ngettext(self, singular: str, plural: str, n: int) -> str:
+            ...
+
+    class _TranslationsContext(_TranslationsBasic):
+        def pgettext(self, context: str, message: str) -> str:
+            ...
+
+        def npgettext(self, context: str, singular: str, plural: str, n: int) -> str:
+            ...
+
+    _SupportedTranslations = t.Union[_TranslationsBasic, _TranslationsContext]
+
+
+# I18N functions available in Jinja templates. If the I18N library
+# provides ugettext, it will be assigned to gettext.
+GETTEXT_FUNCTIONS: t.Tuple[str, ...] = (
+    "_",
+    "gettext",
+    "ngettext",
+    "pgettext",
+    "npgettext",
+)
+_ws_re = re.compile(r"\s*\n\s*")
+
+
+class Extension:
+    """Extensions can be used to add extra functionality to the Jinja template
+    system at the parser level.  Custom extensions are bound to an environment
+    but may not store environment specific data on `self`.  The reason for
+    this is that an extension can be bound to another environment (for
+    overlays) by creating a copy and reassigning the `environment` attribute.
+
+    As extensions are created by the environment they cannot accept any
+    arguments for configuration.  One may want to work around that by using
+    a factory function, but that is not possible as extensions are identified
+    by their import name.  The correct way to configure the extension is
+    storing the configuration values on the environment.  Because this way the
+    environment ends up acting as central configuration storage the
+    attributes may clash which is why extensions have to ensure that the names
+    they choose for configuration are not too generic.  ``prefix`` for example
+    is a terrible name, ``fragment_cache_prefix`` on the other hand is a good
+    name as it includes the name of the extension (fragment cache).
+    """
+
+    identifier: t.ClassVar[str]
+
+    def __init_subclass__(cls) -> None:
+        cls.identifier = f"{cls.__module__}.{cls.__name__}"
+
+    #: if this extension parses, this is the set of tags it's listening to.
+    tags: t.Set[str] = set()
+
+    #: the priority of that extension.  This is especially useful for
+    #: extensions that preprocess values.  A lower value means higher
+    #: priority.
+    #:
+    #: .. versionadded:: 2.4
+    priority = 100
+
+    def __init__(self, environment: Environment) -> None:
+        self.environment = environment
+
+    def bind(self, environment: Environment) -> "Extension":
+        """Create a copy of this extension bound to another environment."""
+        rv = object.__new__(self.__class__)
+        rv.__dict__.update(self.__dict__)
+        rv.environment = environment
+        return rv
+
+    def preprocess(
+        self, source: str, name: t.Optional[str], filename: t.Optional[str] = None
+    ) -> str:
+        """This method is called before the actual lexing and can be used to
+        preprocess the source.  The `filename` is optional.  The return value
+        must be the preprocessed source.
+        """
+        return source
+
+    def filter_stream(
+        self, stream: "TokenStream"
+    ) -> t.Union["TokenStream", t.Iterable["Token"]]:
+        """It's passed a :class:`~jinja2.lexer.TokenStream` that can be used
+        to filter tokens returned.  This method has to return an iterable of
+        :class:`~jinja2.lexer.Token`\\s, but it doesn't have to return a
+        :class:`~jinja2.lexer.TokenStream`.
+        """
+        return stream
+
+    def parse(self, parser: "Parser") -> t.Union[nodes.Node, t.List[nodes.Node]]:
+        """If any of the :attr:`tags` matched this method is called with the
+        parser as first argument.  The token the parser stream is pointing at
+        is the name token that matched.  This method has to return a single
+        node or a list of nodes.
+        """
+        raise NotImplementedError()
+
+    def attr(
+        self, name: str, lineno: t.Optional[int] = None
+    ) -> nodes.ExtensionAttribute:
+        """Return an attribute node for the current extension.  This is useful
+        to pass constants on extensions to generated template code.
+
+        ::
+
+            self.attr('_my_attribute', lineno=lineno)
+        """
+        return nodes.ExtensionAttribute(self.identifier, name, lineno=lineno)
+
+    def call_method(
+        self,
+        name: str,
+        args: t.Optional[t.List[nodes.Expr]] = None,
+        kwargs: t.Optional[t.List[nodes.Keyword]] = None,
+        dyn_args: t.Optional[nodes.Expr] = None,
+        dyn_kwargs: t.Optional[nodes.Expr] = None,
+        lineno: t.Optional[int] = None,
+    ) -> nodes.Call:
+        """Call a method of the extension.  This is a shortcut for
+        :meth:`attr` + :class:`jinja2.nodes.Call`.
+        """
+        if args is None:
+            args = []
+        if kwargs is None:
+            kwargs = []
+        return nodes.Call(
+            self.attr(name, lineno=lineno),
+            args,
+            kwargs,
+            dyn_args,
+            dyn_kwargs,
+            lineno=lineno,
+        )
+
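+# A minimal custom-extension sketch following the naming advice above
+# (hypothetical extension, not shipped with Jinja); configuration lives
+# on the environment, never on `self`:
+#
+#   class FragmentCacheExtension(Extension):
+#       tags = {"cache"}
+#
+#       def __init__(self, environment):
+#           super().__init__(environment)
+#           environment.extend(fragment_cache_prefix="", fragment_cache=None)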
+
+@pass_context
+def _gettext_alias(
+    __context: Context, *args: t.Any, **kwargs: t.Any
+) -> t.Union[t.Any, Undefined]:
+    return __context.call(__context.resolve("gettext"), *args, **kwargs)
+
+
+def _make_new_gettext(func: t.Callable[[str], str]) -> t.Callable[..., str]:
+    @pass_context
+    def gettext(__context: Context, __string: str, **variables: t.Any) -> str:
+        rv = __context.call(func, __string)
+        if __context.eval_ctx.autoescape:
+            rv = Markup(rv)
+        # Always treat as a format string, even if there are no
+        # variables. This makes translation strings more consistent
+        # and predictable. It requires literal "%" in translations to be
+        # escaped as "%%".
+        return rv % variables  # type: ignore
+
+    return gettext
+
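+# With newstyle gettext enabled, templates pass placeholders as keyword
+# arguments and this wrapper applies the %-formatting itself (sketch):
+#
+#   {{ gettext("Hello %(name)s!", name=user.name) }}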
+
+def _make_new_ngettext(func: t.Callable[[str, str, int], str]) -> t.Callable[..., str]:
+    @pass_context
+    def ngettext(
+        __context: Context,
+        __singular: str,
+        __plural: str,
+        __num: int,
+        **variables: t.Any,
+    ) -> str:
+        variables.setdefault("num", __num)
+        rv = __context.call(func, __singular, __plural, __num)
+        if __context.eval_ctx.autoescape:
+            rv = Markup(rv)
+        # Always treat as a format string, see gettext comment above.
+        return rv % variables  # type: ignore
+
+    return ngettext
+
+
+def _make_new_pgettext(func: t.Callable[[str, str], str]) -> t.Callable[..., str]:
+    @pass_context
+    def pgettext(
+        __context: Context, __string_ctx: str, __string: str, **variables: t.Any
+    ) -> str:
+        variables.setdefault("context", __string_ctx)
+        rv = __context.call(func, __string_ctx, __string)
+
+        if __context.eval_ctx.autoescape:
+            rv = Markup(rv)
+
+        # Always treat as a format string, see gettext comment above.
+        return rv % variables  # type: ignore
+
+    return pgettext
+
+
+def _make_new_npgettext(
+    func: t.Callable[[str, str, str, int], str]
+) -> t.Callable[..., str]:
+    @pass_context
+    def npgettext(
+        __context: Context,
+        __string_ctx: str,
+        __singular: str,
+        __plural: str,
+        __num: int,
+        **variables: t.Any,
+    ) -> str:
+        variables.setdefault("context", __string_ctx)
+        variables.setdefault("num", __num)
+        rv = __context.call(func, __string_ctx, __singular, __plural, __num)
+
+        if __context.eval_ctx.autoescape:
+            rv = Markup(rv)
+
+        # Always treat as a format string, see gettext comment above.
+        return rv % variables  # type: ignore
+
+    return npgettext
+
+
+class InternationalizationExtension(Extension):
+    """This extension adds gettext support to Jinja."""
+
+    tags = {"trans"}
+
+    # TODO: the i18n extension is currently reevaluating values in a few
+    # situations.  Take this example:
+    #   {% trans count=something() %}{{ count }} foo{% pluralize
+    #     %}{{ count }} foos{% endtrans %}
+    # something is called twice here.  One time for the gettext value and
+    # the other time for the n-parameter of the ngettext function.
+
+    def __init__(self, environment: Environment) -> None:
+        super().__init__(environment)
+        environment.globals["_"] = _gettext_alias
+        environment.extend(
+            install_gettext_translations=self._install,
+            install_null_translations=self._install_null,
+            install_gettext_callables=self._install_callables,
+            uninstall_gettext_translations=self._uninstall,
+            extract_translations=self._extract,
+            newstyle_gettext=False,
+        )
+
+    def _install(
+        self, translations: "_SupportedTranslations", newstyle: t.Optional[bool] = None
+    ) -> None:
+        # ugettext and ungettext are preferred in case the I18N library
+        # is providing compatibility with older Python versions.
+        gettext = getattr(translations, "ugettext", None)
+        if gettext is None:
+            gettext = translations.gettext
+        ngettext = getattr(translations, "ungettext", None)
+        if ngettext is None:
+            ngettext = translations.ngettext
+
+        pgettext = getattr(translations, "pgettext", None)
+        npgettext = getattr(translations, "npgettext", None)
+        self._install_callables(
+            gettext, ngettext, newstyle=newstyle, pgettext=pgettext, npgettext=npgettext
+        )
+
+    def _install_null(self, newstyle: t.Optional[bool] = None) -> None:
+        import gettext
+
+        translations = gettext.NullTranslations()
+
+        if hasattr(translations, "pgettext"):
+            # Python >= 3.8 provides pgettext on NullTranslations
+            pgettext = translations.pgettext  # type: ignore
+        else:
+
+            def pgettext(c: str, s: str) -> str:
+                return s
+
+        if hasattr(translations, "npgettext"):
+            npgettext = translations.npgettext  # type: ignore
+        else:
+
+            def npgettext(c: str, s: str, p: str, n: int) -> str:
+                return s if n == 1 else p
+
+        self._install_callables(
+            gettext=translations.gettext,
+            ngettext=translations.ngettext,
+            newstyle=newstyle,
+            pgettext=pgettext,
+            npgettext=npgettext,
+        )
+
+    def _install_callables(
+        self,
+        gettext: t.Callable[[str], str],
+        ngettext: t.Callable[[str, str, int], str],
+        newstyle: t.Optional[bool] = None,
+        pgettext: t.Optional[t.Callable[[str, str], str]] = None,
+        npgettext: t.Optional[t.Callable[[str, str, str, int], str]] = None,
+    ) -> None:
+        if newstyle is not None:
+            self.environment.newstyle_gettext = newstyle  # type: ignore
+        if self.environment.newstyle_gettext:  # type: ignore
+            gettext = _make_new_gettext(gettext)
+            ngettext = _make_new_ngettext(ngettext)
+
+            if pgettext is not None:
+                pgettext = _make_new_pgettext(pgettext)
+
+            if npgettext is not None:
+                npgettext = _make_new_npgettext(npgettext)
+
+        self.environment.globals.update(
+            gettext=gettext, ngettext=ngettext, pgettext=pgettext, npgettext=npgettext
+        )
+
+    def _uninstall(self, translations: "_SupportedTranslations") -> None:
+        for key in ("gettext", "ngettext", "pgettext", "npgettext"):
+            self.environment.globals.pop(key, None)
+
+    def _extract(
+        self,
+        source: t.Union[str, nodes.Template],
+        gettext_functions: t.Sequence[str] = GETTEXT_FUNCTIONS,
+    ) -> t.Iterator[
+        t.Tuple[int, str, t.Union[t.Optional[str], t.Tuple[t.Optional[str], ...]]]
+    ]:
+        if isinstance(source, str):
+            source = self.environment.parse(source)
+        return extract_from_ast(source, gettext_functions)
+
+    def parse(self, parser: "Parser") -> t.Union[nodes.Node, t.List[nodes.Node]]:
+        """Parse a translatable tag."""
+        lineno = next(parser.stream).lineno
+
+        context = None
+        context_token = parser.stream.next_if("string")
+
+        if context_token is not None:
+            context = context_token.value
+
+        # find all the variables referenced.  Additionally a variable can be
+        # defined in the body of the trans block too, but this is checked at
+        # a later stage.
+        plural_expr: t.Optional[nodes.Expr] = None
+        plural_expr_assignment: t.Optional[nodes.Assign] = None
+        num_called_num = False
+        variables: t.Dict[str, nodes.Expr] = {}
+        trimmed = None
+        while parser.stream.current.type != "block_end":
+            if variables:
+                parser.stream.expect("comma")
+
+            # skip colon for python compatibility
+            if parser.stream.skip_if("colon"):
+                break
+
+            token = parser.stream.expect("name")
+            if token.value in variables:
+                parser.fail(
+                    f"translatable variable {token.value!r} defined twice.",
+                    token.lineno,
+                    exc=TemplateAssertionError,
+                )
+
+            # expressions
+            if parser.stream.current.type == "assign":
+                next(parser.stream)
+                variables[token.value] = var = parser.parse_expression()
+            elif trimmed is None and token.value in ("trimmed", "notrimmed"):
+                trimmed = token.value == "trimmed"
+                continue
+            else:
+                variables[token.value] = var = nodes.Name(token.value, "load")
+
+            if plural_expr is None:
+                if isinstance(var, nodes.Call):
+                    plural_expr = nodes.Name("_trans", "load")
+                    variables[token.value] = plural_expr
+                    plural_expr_assignment = nodes.Assign(
+                        nodes.Name("_trans", "store"), var
+                    )
+                else:
+                    plural_expr = var
+                num_called_num = token.value == "num"
+
+        parser.stream.expect("block_end")
+
+        plural = None
+        have_plural = False
+        referenced = set()
+
+        # now parse until endtrans or pluralize
+        singular_names, singular = self._parse_block(parser, True)
+        if singular_names:
+            referenced.update(singular_names)
+            if plural_expr is None:
+                plural_expr = nodes.Name(singular_names[0], "load")
+                num_called_num = singular_names[0] == "num"
+
+        # if we have a pluralize block, we parse that too
+        if parser.stream.current.test("name:pluralize"):
+            have_plural = True
+            next(parser.stream)
+            if parser.stream.current.type != "block_end":
+                token = parser.stream.expect("name")
+                if token.value not in variables:
+                    parser.fail(
+                        f"unknown variable {token.value!r} for pluralization",
+                        token.lineno,
+                        exc=TemplateAssertionError,
+                    )
+                plural_expr = variables[token.value]
+                num_called_num = token.value == "num"
+            parser.stream.expect("block_end")
+            plural_names, plural = self._parse_block(parser, False)
+            next(parser.stream)
+            referenced.update(plural_names)
+        else:
+            next(parser.stream)
+
+        # register free names as simple name expressions
+        for name in referenced:
+            if name not in variables:
+                variables[name] = nodes.Name(name, "load")
+
+        if not have_plural:
+            plural_expr = None
+        elif plural_expr is None:
+            parser.fail("pluralize without variables", lineno)
+
+        if trimmed is None:
+            trimmed = self.environment.policies["ext.i18n.trimmed"]
+        if trimmed:
+            singular = self._trim_whitespace(singular)
+            if plural:
+                plural = self._trim_whitespace(plural)
+
+        node = self._make_node(
+            singular,
+            plural,
+            context,
+            variables,
+            plural_expr,
+            bool(referenced),
+            num_called_num and have_plural,
+        )
+        node.set_lineno(lineno)
+        if plural_expr_assignment is not None:
+            return [plural_expr_assignment, node]
+        else:
+            return node
+
+    def _trim_whitespace(self, string: str, _ws_re: t.Pattern[str] = _ws_re) -> str:
+        return _ws_re.sub(" ", string.strip())
+
+    def _parse_block(
+        self, parser: "Parser", allow_pluralize: bool
+    ) -> t.Tuple[t.List[str], str]:
+        """Parse until the next block tag with a given name."""
+        referenced = []
+        buf = []
+
+        while True:
+            if parser.stream.current.type == "data":
+                buf.append(parser.stream.current.value.replace("%", "%%"))
+                next(parser.stream)
+            elif parser.stream.current.type == "variable_begin":
+                next(parser.stream)
+                name = parser.stream.expect("name").value
+                referenced.append(name)
+                buf.append(f"%({name})s")
+                parser.stream.expect("variable_end")
+            elif parser.stream.current.type == "block_begin":
+                next(parser.stream)
+                if parser.stream.current.test("name:endtrans"):
+                    break
+                elif parser.stream.current.test("name:pluralize"):
+                    if allow_pluralize:
+                        break
+                    parser.fail(
+                        "a translatable section can have only one pluralize section"
+                    )
+                parser.fail(
+                    "control structures in translatable sections are not allowed"
+                )
+            elif parser.stream.eos:
+                parser.fail("unclosed translation block")
+            else:
+                raise RuntimeError("internal parser error")
+
+        return referenced, concat(buf)
+
+    def _make_node(
+        self,
+        singular: str,
+        plural: t.Optional[str],
+        context: t.Optional[str],
+        variables: t.Dict[str, nodes.Expr],
+        plural_expr: t.Optional[nodes.Expr],
+        vars_referenced: bool,
+        num_called_num: bool,
+    ) -> nodes.Output:
+        """Generates a useful node from the data provided."""
+        newstyle = self.environment.newstyle_gettext  # type: ignore
+        node: nodes.Expr
+
+        # if no variables are referenced and we're not using newstyle
+        # gettext, undo the "%%" escaping added while parsing the block.
+        if not vars_referenced and not newstyle:
+            singular = singular.replace("%%", "%")
+            if plural:
+                plural = plural.replace("%%", "%")
+
+        func_name = "gettext"
+        func_args: t.List[nodes.Expr] = [nodes.Const(singular)]
+
+        if context is not None:
+            func_args.insert(0, nodes.Const(context))
+            func_name = f"p{func_name}"
+
+        if plural_expr is not None:
+            func_name = f"n{func_name}"
+            func_args.extend((nodes.Const(plural), plural_expr))
+
+        node = nodes.Call(nodes.Name(func_name, "load"), func_args, [], None, None)
+
+        # in case newstyle gettext is used, the method is powerful
+        # enough to handle the variable expansion and autoescape
+        # handling itself
+        if newstyle:
+            for key, value in variables.items():
+                # the gettext call adds "num" itself later when the plural
+                # variable is named "num", so just skip it here.
+                if num_called_num and key == "num":
+                    continue
+                node.kwargs.append(nodes.Keyword(key, value))
+
+        # otherwise do that here
+        else:
+            # mark the return value as safe if we are in an
+            # environment with autoescaping turned on
+            node = nodes.MarkSafeIfAutoescape(node)
+            if variables:
+                node = nodes.Mod(
+                    node,
+                    nodes.Dict(
+                        [
+                            nodes.Pair(nodes.Const(key), value)
+                            for key, value in variables.items()
+                        ]
+                    ),
+                )
+        return nodes.Output([node])
+
+
+class ExprStmtExtension(Extension):
+    """Adds a `do` tag to Jinja that works like the print statement just
+    that it doesn't print the return value.
+    """
+
+    tags = {"do"}
+
+    def parse(self, parser: "Parser") -> nodes.ExprStmt:
+        node = nodes.ExprStmt(lineno=next(parser.stream).lineno)
+        node.node = parser.parse_tuple()
+        return node
+
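+# Template usage once `jinja2.ext.do` is loaded: evaluates the expression
+# and discards the result.
+#
+#   {% do navigation.append("item") %}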
+
+class LoopControlExtension(Extension):
+    """Adds break and continue to the template engine."""
+
+    tags = {"break", "continue"}
+
+    def parse(self, parser: "Parser") -> t.Union[nodes.Break, nodes.Continue]:
+        token = next(parser.stream)
+        if token.value == "break":
+            return nodes.Break(lineno=token.lineno)
+        return nodes.Continue(lineno=token.lineno)
+
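+# Template usage once `jinja2.ext.loopcontrols` is loaded:
+#
+#   {% for user in users %}
+#       {%- if user.hidden %}{% continue %}{% endif %}
+#       {{ user.name }}
+#   {% endfor %}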
+
+class DebugExtension(Extension):
+    """A ``{% debug %}`` tag that dumps the available variables,
+    filters, and tests.
+
+    .. code-block:: html+jinja
+
+        <pre>{% debug %}</pre>
+
+    .. code-block:: text
+
+        {'context': {'cycler': <class 'jinja2.utils.Cycler'>,
+                     ...,
+                     'namespace': <class 'jinja2.utils.Namespace'>},
+         'filters': ['abs', 'attr', 'batch', 'capitalize', 'center', 'count', 'd',
+                     ..., 'urlencode', 'urlize', 'wordcount', 'wordwrap', 'xmlattr'],
+         'tests': ['!=', '<', '<=', '==', '>', '>=', 'callable', 'defined',
+                   ..., 'odd', 'sameas', 'sequence', 'string', 'undefined', 'upper']}
+
+    .. versionadded:: 2.11.0
+    """
+
+    tags = {"debug"}
+
+    def parse(self, parser: "Parser") -> nodes.Output:
+        lineno = parser.stream.expect("name:debug").lineno
+        context = nodes.ContextReference()
+        result = self.call_method("_render", [context], lineno=lineno)
+        return nodes.Output([result], lineno=lineno)
+
+    def _render(self, context: Context) -> str:
+        result = {
+            "context": context.get_all(),
+            "filters": sorted(self.environment.filters.keys()),
+            "tests": sorted(self.environment.tests.keys()),
+        }
+
+        # Set the depth since the intent is to show the top few names.
+        return pprint.pformat(result, depth=3, compact=True)
+
+
+def extract_from_ast(
+    ast: nodes.Template,
+    gettext_functions: t.Sequence[str] = GETTEXT_FUNCTIONS,
+    babel_style: bool = True,
+) -> t.Iterator[
+    t.Tuple[int, str, t.Union[t.Optional[str], t.Tuple[t.Optional[str], ...]]]
+]:
+    """Extract localizable strings from the given template node.  Per
+    default this function returns matches in babel style that means non string
+    parameters as well as keyword arguments are returned as `None`.  This
+    allows Babel to figure out what you really meant if you are using
+    gettext functions that allow keyword arguments for placeholder expansion.
+    If you don't want that behavior set the `babel_style` parameter to `False`
+    which causes only strings to be returned and parameters are always stored
+    in tuples.  As a consequence invalid gettext calls (calls without a single
+    string parameter or string parameters after non-string parameters) are
+    skipped.
+
+    This example explains the behavior:
+
+    >>> from jinja2 import Environment
+    >>> env = Environment()
+    >>> node = env.parse('{{ (_("foo"), _(), ngettext("foo", "bar", 42)) }}')
+    >>> list(extract_from_ast(node))
+    [(1, '_', 'foo'), (1, '_', ()), (1, 'ngettext', ('foo', 'bar', None))]
+    >>> list(extract_from_ast(node, babel_style=False))
+    [(1, '_', ('foo',)), (1, 'ngettext', ('foo', 'bar'))]
+
+    For every string found this function yields a ``(lineno, function,
+    message)`` tuple, where:
+
+    * ``lineno`` is the number of the line on which the string was found,
+    * ``function`` is the name of the ``gettext`` function used (if the
+      string was extracted from embedded Python code), and
+    * ``message`` is the string, or a tuple of strings for functions
+      with multiple string arguments.
+
+    This extraction function operates on the AST and is therefore unable
+    to extract any comments.  For comment support you have to use the babel
+    extraction interface or extract comments yourself.
+    """
+    out: t.Union[t.Optional[str], t.Tuple[t.Optional[str], ...]]
+
+    for node in ast.find_all(nodes.Call):
+        if (
+            not isinstance(node.node, nodes.Name)
+            or node.node.name not in gettext_functions
+        ):
+            continue
+
+        strings: t.List[t.Optional[str]] = []
+
+        for arg in node.args:
+            if isinstance(arg, nodes.Const) and isinstance(arg.value, str):
+                strings.append(arg.value)
+            else:
+                strings.append(None)
+
+        for _ in node.kwargs:
+            strings.append(None)
+        if node.dyn_args is not None:
+            strings.append(None)
+        if node.dyn_kwargs is not None:
+            strings.append(None)
+
+        if not babel_style:
+            out = tuple(x for x in strings if x is not None)
+
+            if not out:
+                continue
+        else:
+            if len(strings) == 1:
+                out = strings[0]
+            else:
+                out = tuple(strings)
+
+        yield node.lineno, node.node.name, out
+
+
+class _CommentFinder:
+    """Helper class to find comments in a token stream.  Can only
+    find comments for gettext calls forwards.  Once the comment
+    from line 4 is found, a comment for line 1 will not return a
+    usable value.
+    """
+
+    def __init__(
+        self, tokens: t.Sequence[t.Tuple[int, str, str]], comment_tags: t.Sequence[str]
+    ) -> None:
+        self.tokens = tokens
+        self.comment_tags = comment_tags
+        self.offset = 0
+        self.last_lineno = 0
+
+    def find_backwards(self, offset: int) -> t.List[str]:
+        try:
+            for _, token_type, token_value in reversed(
+                self.tokens[self.offset : offset]
+            ):
+                if token_type in ("comment", "linecomment"):
+                    try:
+                        prefix, comment = token_value.split(None, 1)
+                    except ValueError:
+                        continue
+                    if prefix in self.comment_tags:
+                        return [comment.rstrip()]
+            return []
+        finally:
+            self.offset = offset
+
+    def find_comments(self, lineno: int) -> t.List[str]:
+        if not self.comment_tags or self.last_lineno > lineno:
+            return []
+        for idx, (token_lineno, _, _) in enumerate(self.tokens[self.offset :]):
+            if token_lineno > lineno:
+                return self.find_backwards(self.offset + idx)
+        return self.find_backwards(len(self.tokens))
+
+
+def babel_extract(
+    fileobj: t.BinaryIO,
+    keywords: t.Sequence[str],
+    comment_tags: t.Sequence[str],
+    options: t.Dict[str, t.Any],
+) -> t.Iterator[
+    t.Tuple[
+        int, str, t.Union[t.Optional[str], t.Tuple[t.Optional[str], ...]], t.List[str]
+    ]
+]:
+    """Babel extraction method for Jinja templates.
+
+    .. versionchanged:: 2.3
+       Basic support for translation comments was added.  If `comment_tags`
+       is now set to a list of keywords for extraction, the extractor will
+       try to find the best preceding comment that begins with one of the
+       keywords.  For best results, make sure to not have more than one
+       gettext call in one line of code and the matching comment in the
+       same line or the line before.
+
+    .. versionchanged:: 2.5.1
+       The `newstyle_gettext` flag can be set to `True` to enable newstyle
+       gettext calls.
+
+    .. versionchanged:: 2.7
+       A `silent` option can now be provided.  If set to `False` template
+       syntax errors are propagated instead of being ignored.
+
+    :param fileobj: the file-like object the messages should be extracted from
+    :param keywords: a list of keywords (i.e. function names) that should be
+                     recognized as translation functions
+    :param comment_tags: a list of translator tags to search for and include
+                         in the results.
+    :param options: a dictionary of additional options (optional)
+    :return: an iterator over ``(lineno, funcname, message, comments)`` tuples
+             (comments are found only when `comment_tags` is given)
+    """
+    extensions: t.Dict[t.Type[Extension], None] = {}
+
+    for extension_name in options.get("extensions", "").split(","):
+        extension_name = extension_name.strip()
+
+        if not extension_name:
+            continue
+
+        extensions[import_string(extension_name)] = None
+
+    if InternationalizationExtension not in extensions:
+        extensions[InternationalizationExtension] = None
+
+    def getbool(options: t.Mapping[str, str], key: str, default: bool = False) -> bool:
+        return options.get(key, str(default)).lower() in {"1", "on", "yes", "true"}
+
+    silent = getbool(options, "silent", True)
+    environment = Environment(
+        options.get("block_start_string", defaults.BLOCK_START_STRING),
+        options.get("block_end_string", defaults.BLOCK_END_STRING),
+        options.get("variable_start_string", defaults.VARIABLE_START_STRING),
+        options.get("variable_end_string", defaults.VARIABLE_END_STRING),
+        options.get("comment_start_string", defaults.COMMENT_START_STRING),
+        options.get("comment_end_string", defaults.COMMENT_END_STRING),
+        options.get("line_statement_prefix") or defaults.LINE_STATEMENT_PREFIX,
+        options.get("line_comment_prefix") or defaults.LINE_COMMENT_PREFIX,
+        getbool(options, "trim_blocks", defaults.TRIM_BLOCKS),
+        getbool(options, "lstrip_blocks", defaults.LSTRIP_BLOCKS),
+        defaults.NEWLINE_SEQUENCE,
+        getbool(options, "keep_trailing_newline", defaults.KEEP_TRAILING_NEWLINE),
+        tuple(extensions),
+        cache_size=0,
+        auto_reload=False,
+    )
+
+    if getbool(options, "trimmed"):
+        environment.policies["ext.i18n.trimmed"] = True
+    if getbool(options, "newstyle_gettext"):
+        environment.newstyle_gettext = True  # type: ignore
+
+    source = fileobj.read().decode(options.get("encoding", "utf-8"))
+    try:
+        node = environment.parse(source)
+        tokens = list(environment.lex(environment.preprocess(source)))
+    except TemplateSyntaxError:
+        if not silent:
+            raise
+        # skip templates with syntax errors
+        return
+
+    finder = _CommentFinder(tokens, comment_tags)
+    for lineno, func, message in extract_from_ast(node, keywords):
+        yield lineno, func, message, finder.find_comments(lineno)
+
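+# A babel.cfg sketch wiring this extractor into Babel's extraction
+# framework (paths and options here are illustrative):
+#
+#   [jinja2: templates/**.html]
+#   encoding = utf-8
+#   extensions = jinja2.ext.do, jinja2.ext.loopcontrols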
+
+#: nicer import names
+i18n = InternationalizationExtension
+do = ExprStmtExtension
+loopcontrols = LoopControlExtension
+debug = DebugExtension
diff --git a/venv/lib/python3.9/site-packages/jinja2/filters.py b/venv/lib/python3.9/site-packages/jinja2/filters.py
new file mode 100644
index 0000000..ed07c4c
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/filters.py
@@ -0,0 +1,1840 @@
+"""Built-in template filters used with the ``|`` operator."""
+import math
+import random
+import re
+import typing
+import typing as t
+from collections import abc
+from itertools import chain
+from itertools import groupby
+
+from markupsafe import escape
+from markupsafe import Markup
+from markupsafe import soft_str
+
+from .async_utils import async_variant
+from .async_utils import auto_aiter
+from .async_utils import auto_await
+from .async_utils import auto_to_list
+from .exceptions import FilterArgumentError
+from .runtime import Undefined
+from .utils import htmlsafe_json_dumps
+from .utils import pass_context
+from .utils import pass_environment
+from .utils import pass_eval_context
+from .utils import pformat
+from .utils import url_quote
+from .utils import urlize
+
+if t.TYPE_CHECKING:
+    import typing_extensions as te
+    from .environment import Environment
+    from .nodes import EvalContext
+    from .runtime import Context
+    from .sandbox import SandboxedEnvironment  # noqa: F401
+
+    class HasHTML(te.Protocol):
+        def __html__(self) -> str:
+            ...
+
+
+F = t.TypeVar("F", bound=t.Callable[..., t.Any])
+K = t.TypeVar("K")
+V = t.TypeVar("V")
+
+
+def ignore_case(value: V) -> V:
+    """For use as a postprocessor for :func:`make_attrgetter`. Converts strings
+    to lowercase and returns other types as-is."""
+    if isinstance(value, str):
+        return t.cast(V, value.lower())
+
+    return value
+
+
+def make_attrgetter(
+    environment: "Environment",
+    attribute: t.Optional[t.Union[str, int]],
+    postprocess: t.Optional[t.Callable[[t.Any], t.Any]] = None,
+    default: t.Optional[t.Any] = None,
+) -> t.Callable[[t.Any], t.Any]:
+    """Returns a callable that looks up the given attribute from a
+    passed object with the rules of the environment.  Dots are allowed
+    to access attributes of attributes.  Integer parts in paths are
+    looked up as integers.
+    """
+    parts = _prepare_attribute_parts(attribute)
+
+    def attrgetter(item: t.Any) -> t.Any:
+        for part in parts:
+            item = environment.getitem(item, part)
+
+            if default is not None and isinstance(item, Undefined):
+                item = default
+
+        if postprocess is not None:
+            item = postprocess(item)
+
+        return item
+
+    return attrgetter
+
+
+def make_multi_attrgetter(
+    environment: "Environment",
+    attribute: t.Optional[t.Union[str, int]],
+    postprocess: t.Optional[t.Callable[[t.Any], t.Any]] = None,
+) -> t.Callable[[t.Any], t.List[t.Any]]:
+    """Returns a callable that looks up the given comma separated
+    attributes from a passed object with the rules of the environment.
+    Dots are allowed to access attributes of each attribute.  Integer
+    parts in paths are looked up as integers.
+
+    The value returned by the returned callable is a list of extracted
+    attribute values.
+
+    Examples of attribute: "attr1,attr2", "attr1.inner1.0,attr2.inner2.0", etc.
+    """
+    if isinstance(attribute, str):
+        split: t.Sequence[t.Union[str, int, None]] = attribute.split(",")
+    else:
+        split = [attribute]
+
+    parts = [_prepare_attribute_parts(item) for item in split]
+
+    def attrgetter(item: t.Any) -> t.List[t.Any]:
+        items = [None] * len(parts)
+
+        for i, attribute_part in enumerate(parts):
+            item_i = item
+
+            for part in attribute_part:
+                item_i = environment.getitem(item_i, part)
+
+            if postprocess is not None:
+                item_i = postprocess(item_i)
+
+            items[i] = item_i
+
+        return items
+
+    return attrgetter
+
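+# Sketch (assumes `env` is an Environment and `user` has these fields);
+# lookups go through `environment.getitem`, so dict keys work too:
+#
+#   getter = make_multi_attrgetter(env, "age,name")
+#   getter(user)  # -> [user.age, user.name]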
+
+def _prepare_attribute_parts(
+    attr: t.Optional[t.Union[str, int]]
+) -> t.List[t.Union[str, int]]:
+    if attr is None:
+        return []
+
+    if isinstance(attr, str):
+        return [int(x) if x.isdigit() else x for x in attr.split(".")]
+
+    return [attr]
+
+
+def do_forceescape(value: "t.Union[str, HasHTML]") -> Markup:
+    """Enforce HTML escaping.  This will probably double escape variables."""
+    if hasattr(value, "__html__"):
+        value = t.cast("HasHTML", value).__html__()
+
+    return escape(str(value))
+
+
+def do_urlencode(
+    value: t.Union[str, t.Mapping[str, t.Any], t.Iterable[t.Tuple[str, t.Any]]]
+) -> str:
+    """Quote data for use in a URL path or query using UTF-8.
+
+    Basic wrapper around :func:`urllib.parse.quote` when given a
+    string, or :func:`urllib.parse.urlencode` for a dict or iterable.
+
+    :param value: Data to quote. A string will be quoted directly. A
+        dict or iterable of ``(key, value)`` pairs will be joined as a
+        query string.
+
+    When given a string, "/" is not quoted. HTTP servers treat "/" and
+    "%2F" equivalently in paths. If you need quoted slashes, use the
+    ``|replace("/", "%2F")`` filter.
+
+    .. versionadded:: 2.7
+    """
+    if isinstance(value, str) or not isinstance(value, abc.Iterable):
+        return url_quote(value)
+
+    if isinstance(value, dict):
+        items: t.Iterable[t.Tuple[str, t.Any]] = value.items()
+    else:
+        items = value  # type: ignore
+
+    return "&".join(
+        f"{url_quote(k, for_qs=True)}={url_quote(v, for_qs=True)}" for k, v in items
+    )
+
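+# Worked examples (dict insertion order is preserved in the query string):
+#
+#   {{ "path with spaces"|urlencode }}        -> path%20with%20spaces
+#   {{ {"q": "jinja", "page": 2}|urlencode }} -> q=jinja&page=2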
+
+@pass_eval_context
+def do_replace(
+    eval_ctx: "EvalContext", s: str, old: str, new: str, count: t.Optional[int] = None
+) -> str:
+    """Return a copy of the value with all occurrences of a substring
+    replaced with a new one. The first argument is the substring
+    that should be replaced, the second is the replacement string.
+    If the optional third argument ``count`` is given, only the first
+    ``count`` occurrences are replaced:
+
+    .. sourcecode:: jinja
+
+        {{ "Hello World"|replace("Hello", "Goodbye") }}
+            -> Goodbye World
+
+        {{ "aaaaargh"|replace("a", "d'oh, ", 2) }}
+            -> d'oh, d'oh, aaargh
+    """
+    if count is None:
+        count = -1
+
+    if not eval_ctx.autoescape:
+        return str(s).replace(str(old), str(new), count)
+
+    if (
+        hasattr(old, "__html__")
+        or hasattr(new, "__html__")
+        and not hasattr(s, "__html__")
+    ):
+        s = escape(s)
+    else:
+        s = soft_str(s)
+
+    return s.replace(soft_str(old), soft_str(new), count)
+
+
+def do_upper(s: str) -> str:
+    """Convert a value to uppercase."""
+    return soft_str(s).upper()
+
+
+def do_lower(s: str) -> str:
+    """Convert a value to lowercase."""
+    return soft_str(s).lower()
+
+
+def do_items(value: t.Union[t.Mapping[K, V], Undefined]) -> t.Iterator[t.Tuple[K, V]]:
+    """Return an iterator over the ``(key, value)`` items of a mapping.
+
+    ``x|items`` is the same as ``x.items()``, except if ``x`` is
+    undefined an empty iterator is returned.
+
+    This filter is useful if you expect the template to be rendered with
+    an implementation of Jinja in another programming language that does
+    not have a ``.items()`` method on its mapping type.
+
+    .. code-block:: html+jinja
+
+        <dl>
+        {% for key, value in my_dict|items %}
+            <dt>{{ key }}
+            <dd>{{ value }}
+        {% endfor %}
+        </dl>
+
+    .. versionadded:: 3.1
+    """
+    if isinstance(value, Undefined):
+        return
+
+    if not isinstance(value, abc.Mapping):
+        raise TypeError("Can only get item pairs from a mapping.")
+
+    yield from value.items()
+
+
+@pass_eval_context
+def do_xmlattr(
+    eval_ctx: "EvalContext", d: t.Mapping[str, t.Any], autospace: bool = True
+) -> str:
+    """Create an SGML/XML attribute string based on the items in a dict.
+    All values that are neither `none` nor `undefined` are automatically
+    escaped:
+
+    .. sourcecode:: html+jinja
+
+        <ul{{ {'class': 'my_list', 'missing': none,
+                'id': 'list-%d'|format(variable)}|xmlattr }}>
+        ...
+        </ul>
+
+    Results in something like this:
+
+    .. sourcecode:: html
+
+        <ul class="my_list" id="list-42">
+        ...
+        </ul>
+
+    As you can see it automatically prepends a space in front of the result
+    if the filter returned something, unless the second parameter is false.
+    """
+    rv = " ".join(
+        f'{escape(key)}="{escape(value)}"'
+        for key, value in d.items()
+        if value is not None and not isinstance(value, Undefined)
+    )
+
+    if autospace and rv:
+        rv = " " + rv
+
+    if eval_ctx.autoescape:
+        rv = Markup(rv)
+
+    return rv
+
+
+def do_capitalize(s: str) -> str:
+    """Capitalize a value. The first character will be uppercase, all others
+    lowercase.
+    """
+    return soft_str(s).capitalize()
+
+
+_word_beginning_split_re = re.compile(r"([-\s({\[<]+)")
+
+
+def do_title(s: str) -> str:
+    """Return a titlecased version of the value. I.e. words will start with
+    uppercase letters, all remaining characters are lowercase.
+    """
+    return "".join(
+        [
+            item[0].upper() + item[1:].lower()
+            for item in _word_beginning_split_re.split(soft_str(s))
+            if item
+        ]
+    )
+
+
+def do_dictsort(
+    value: t.Mapping[K, V],
+    case_sensitive: bool = False,
+    by: 'te.Literal["key", "value"]' = "key",
+    reverse: bool = False,
+) -> t.List[t.Tuple[K, V]]:
+    """Sort a dict and yield (key, value) pairs. Python dicts may not
+    be in the order you want to display them in, so sort them first.
+
+    .. sourcecode:: jinja
+
+        {% for key, value in mydict|dictsort %}
+            sort the dict by key, case insensitive
+
+        {% for key, value in mydict|dictsort(reverse=true) %}
+            sort the dict by key, case insensitive, reverse order
+
+        {% for key, value in mydict|dictsort(true) %}
+            sort the dict by key, case sensitive
+
+        {% for key, value in mydict|dictsort(false, 'value') %}
+            sort the dict by value, case insensitive
+    """
+    if by == "key":
+        pos = 0
+    elif by == "value":
+        pos = 1
+    else:
+        raise FilterArgumentError('You can only sort by either "key" or "value"')
+
+    def sort_func(item: t.Tuple[t.Any, t.Any]) -> t.Any:
+        value = item[pos]
+
+        if not case_sensitive:
+            value = ignore_case(value)
+
+        return value
+
+    return sorted(value.items(), key=sort_func, reverse=reverse)
+
+
+@pass_environment
+def do_sort(
+    environment: "Environment",
+    value: "t.Iterable[V]",
+    reverse: bool = False,
+    case_sensitive: bool = False,
+    attribute: t.Optional[t.Union[str, int]] = None,
+) -> "t.List[V]":
+    """Sort an iterable using Python's :func:`sorted`.
+
+    .. sourcecode:: jinja
+
+        {% for city in cities|sort %}
+            ...
+        {% endfor %}
+
+    :param reverse: Sort descending instead of ascending.
+    :param case_sensitive: When sorting strings, sort upper and lower
+        case separately.
+    :param attribute: When sorting objects or dicts, an attribute or
+        key to sort by. Can use dot notation like ``"address.city"``.
+        Can be a list of attributes like ``"age,name"``.
+
+    The sort is stable; it does not change the relative order of
+    elements that compare equal. This makes it possible to chain
+    sorts on different attributes and ordering.
+
+    .. sourcecode:: jinja
+
+        {% for user in users|sort(attribute="name")
+            |sort(reverse=true, attribute="age") %}
+            ...
+        {% endfor %}
+
+    As a shortcut to chaining when the direction is the same for all
+    attributes, pass a comma-separated list of attributes.
+
+    .. sourcecode:: jinja
+
+        {% for user in users|sort(attribute="age,name") %}
+            ...
+        {% endfor %}
+
+    .. versionchanged:: 2.11.0
+        The ``attribute`` parameter can be a comma separated list of
+        attributes, e.g. ``"age,name"``.
+
+    .. versionchanged:: 2.6
+       The ``attribute`` parameter was added.
+    """
+    key_func = make_multi_attrgetter(
+        environment, attribute, postprocess=ignore_case if not case_sensitive else None
+    )
+    return sorted(value, key=key_func, reverse=reverse)
+
+
+@pass_environment
+def do_unique(
+    environment: "Environment",
+    value: "t.Iterable[V]",
+    case_sensitive: bool = False,
+    attribute: t.Optional[t.Union[str, int]] = None,
+) -> "t.Iterator[V]":
+    """Returns a list of unique items from the given iterable.
+
+    .. sourcecode:: jinja
+
+        {{ ['foo', 'bar', 'foobar', 'FooBar']|unique|list }}
+            -> ['foo', 'bar', 'foobar']
+
+    The unique items are yielded in the same order as their first occurrence in
+    the iterable passed to the filter.
+
+    :param case_sensitive: Treat upper and lower case strings as distinct.
+    :param attribute: Filter objects with unique values for this attribute.
+    """
+    getter = make_attrgetter(
+        environment, attribute, postprocess=ignore_case if not case_sensitive else None
+    )
+    seen = set()
+
+    for item in value:
+        key = getter(item)
+
+        if key not in seen:
+            seen.add(key)
+            yield item
+
+
+def _min_or_max(
+    environment: "Environment",
+    value: "t.Iterable[V]",
+    func: "t.Callable[..., V]",
+    case_sensitive: bool,
+    attribute: t.Optional[t.Union[str, int]],
+) -> "t.Union[V, Undefined]":
+    it = iter(value)
+
+    try:
+        first = next(it)
+    except StopIteration:
+        return environment.undefined("No aggregated item, sequence was empty.")
+
+    key_func = make_attrgetter(
+        environment, attribute, postprocess=ignore_case if not case_sensitive else None
+    )
+    return func(chain([first], it), key=key_func)
+
+
+@pass_environment
+def do_min(
+    environment: "Environment",
+    value: "t.Iterable[V]",
+    case_sensitive: bool = False,
+    attribute: t.Optional[t.Union[str, int]] = None,
+) -> "t.Union[V, Undefined]":
+    """Return the smallest item from the sequence.
+
+    .. sourcecode:: jinja
+
+        {{ [1, 2, 3]|min }}
+            -> 1
+
+    :param case_sensitive: Treat upper and lower case strings as distinct.
+    :param attribute: Get the object with the min value of this attribute.
+    """
+    return _min_or_max(environment, value, min, case_sensitive, attribute)
+
+
+@pass_environment
+def do_max(
+    environment: "Environment",
+    value: "t.Iterable[V]",
+    case_sensitive: bool = False,
+    attribute: t.Optional[t.Union[str, int]] = None,
+) -> "t.Union[V, Undefined]":
+    """Return the largest item from the sequence.
+
+    .. sourcecode:: jinja
+
+        {{ [1, 2, 3]|max }}
+            -> 3
+
+    :param case_sensitive: Treat upper and lower case strings as distinct.
+    :param attribute: Get the object with the max value of this attribute.
+    """
+    return _min_or_max(environment, value, max, case_sensitive, attribute)
+
+
+def do_default(
+    value: V,
+    default_value: V = "",  # type: ignore
+    boolean: bool = False,
+) -> V:
+    """If the value is undefined it will return the passed default value,
+    otherwise the value of the variable:
+
+    .. sourcecode:: jinja
+
+        {{ my_variable|default('my_variable is not defined') }}
+
+    This will output the value of ``my_variable`` if the variable was
+    defined, otherwise ``'my_variable is not defined'``. If you want
+    to use default with variables that evaluate to false you have to
+    set the second parameter to `true`:
+
+    .. sourcecode:: jinja
+
+        {{ ''|default('the string was empty', true) }}
+
+    .. versionchanged:: 2.11
+       It's now possible to configure the :class:`~jinja2.Environment` with
+       :class:`~jinja2.ChainableUndefined` to make the `default` filter work
+       on nested elements and attributes that may contain undefined values
+       in the chain without getting an :exc:`~jinja2.UndefinedError`.
+    """
+    if isinstance(value, Undefined) or (boolean and not value):
+        return default_value
+
+    return value
+
+
+@pass_eval_context
+def sync_do_join(
+    eval_ctx: "EvalContext",
+    value: t.Iterable,
+    d: str = "",
+    attribute: t.Optional[t.Union[str, int]] = None,
+) -> str:
+    """Return a string which is the concatenation of the strings in the
+    sequence. The separator between elements is an empty string per
+    default; you can define it with the optional parameter:
+
+    .. sourcecode:: jinja
+
+        {{ [1, 2, 3]|join('|') }}
+            -> 1|2|3
+
+        {{ [1, 2, 3]|join }}
+            -> 123
+
+    It is also possible to join certain attributes of an object:
+
+    .. sourcecode:: jinja
+
+        {{ users|join(', ', attribute='username') }}
+
+    .. versionadded:: 2.6
+       The `attribute` parameter was added.
+    """
+    if attribute is not None:
+        value = map(make_attrgetter(eval_ctx.environment, attribute), value)
+
+    # no automatic escaping?  joining is a lot easier then
+    if not eval_ctx.autoescape:
+        return str(d).join(map(str, value))
+
+    # if the delimiter doesn't have an html representation we check
+    # if any of the items has one.  If so, we coerce to Markup
+    if not hasattr(d, "__html__"):
+        value = list(value)
+        do_escape = False
+
+        for idx, item in enumerate(value):
+            if hasattr(item, "__html__"):
+                do_escape = True
+            else:
+                value[idx] = str(item)
+
+        if do_escape:
+            d = escape(d)
+        else:
+            d = str(d)
+
+        return d.join(value)
+
+    # no html involved, do normal joining
+    return soft_str(d).join(map(soft_str, value))
+
+
+@async_variant(sync_do_join)  # type: ignore
+async def do_join(
+    eval_ctx: "EvalContext",
+    value: t.Union[t.AsyncIterable, t.Iterable],
+    d: str = "",
+    attribute: t.Optional[t.Union[str, int]] = None,
+) -> str:
+    return sync_do_join(eval_ctx, await auto_to_list(value), d, attribute)
+
+
+def do_center(value: str, width: int = 80) -> str:
+    """Centers the value in a field of a given width."""
+    return soft_str(value).center(width)
+
+
+@pass_environment
+def sync_do_first(
+    environment: "Environment", seq: "t.Iterable[V]"
+) -> "t.Union[V, Undefined]":
+    """Return the first item of a sequence."""
+    try:
+        return next(iter(seq))
+    except StopIteration:
+        return environment.undefined("No first item, sequence was empty.")
+
+
+@async_variant(sync_do_first)  # type: ignore
+async def do_first(
+    environment: "Environment", seq: "t.Union[t.AsyncIterable[V], t.Iterable[V]]"
+) -> "t.Union[V, Undefined]":
+    try:
+        return await auto_aiter(seq).__anext__()
+    except StopAsyncIteration:
+        return environment.undefined("No first item, sequence was empty.")
+
+
+@pass_environment
+def do_last(
+    environment: "Environment", seq: "t.Reversible[V]"
+) -> "t.Union[V, Undefined]":
+    """Return the last item of a sequence.
+
+    Note: Does not work with generators. You may want to explicitly
+    convert it to a list:
+
+    .. sourcecode:: jinja
+
+        {{ data | selectattr('name', '==', 'Jinja') | list | last }}
+    """
+    try:
+        return next(iter(reversed(seq)))
+    except StopIteration:
+        return environment.undefined("No last item, sequence was empty.")
+
+
+# No async do_last, it may not be safe in async mode.
+
+
+@pass_context
+def do_random(context: "Context", seq: "t.Sequence[V]") -> "t.Union[V, Undefined]":
+    """Return a random item from the sequence."""
+    try:
+        return random.choice(seq)
+    except IndexError:
+        return context.environment.undefined("No random item, sequence was empty.")
+
+
+def do_filesizeformat(value: t.Union[str, float, int], binary: bool = False) -> str:
+    """Format the value like a 'human-readable' file size (i.e. 13 kB,
+    4.1 MB, 102 Bytes, etc).  Per default decimal prefixes are used (Mega,
+    Giga, etc.), if the second parameter is set to `True` the binary
+    prefixes are used (Mebi, Gibi).
+    """
+    bytes = float(value)
+    base = 1024 if binary else 1000
+    prefixes = [
+        ("KiB" if binary else "kB"),
+        ("MiB" if binary else "MB"),
+        ("GiB" if binary else "GB"),
+        ("TiB" if binary else "TB"),
+        ("PiB" if binary else "PB"),
+        ("EiB" if binary else "EB"),
+        ("ZiB" if binary else "ZB"),
+        ("YiB" if binary else "YB"),
+    ]
+
+    if bytes == 1:
+        return "1 Byte"
+    elif bytes < base:
+        return f"{int(bytes)} Bytes"
+    else:
+        for i, prefix in enumerate(prefixes):
+            unit = base ** (i + 2)
+
+            if bytes < unit:
+                return f"{base * bytes / unit:.1f} {prefix}"
+
+        return f"{base * bytes / unit:.1f} {prefix}"
+
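+# Worked examples for the thresholds above:
+#
+#   {{ 1000000|filesizeformat }}       -> 1.0 MB
+#   {{ 1000000|filesizeformat(true) }} -> 976.6 KiB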
+
+def do_pprint(value: t.Any) -> str:
+    """Pretty print a variable. Useful for debugging."""
+    return pformat(value)
+
+
+_uri_scheme_re = re.compile(r"^([\w.+-]{2,}:(/){0,2})$")
+
+
+@pass_eval_context
+def do_urlize(
+    eval_ctx: "EvalContext",
+    value: str,
+    trim_url_limit: t.Optional[int] = None,
+    nofollow: bool = False,
+    target: t.Optional[str] = None,
+    rel: t.Optional[str] = None,
+    extra_schemes: t.Optional[t.Iterable[str]] = None,
+) -> str:
+    """Convert URLs in text into clickable links.
+
+    This may not recognize links in some situations. Usually, a more
+    comprehensive formatter, such as a Markdown library, is a better
+    choice.
+
+    Works on ``http://``, ``https://``, ``www.``, ``mailto:``, and email
+    addresses. Links with trailing punctuation (periods, commas, closing
+    parentheses) and leading punctuation (opening parentheses) are
+    recognized excluding the punctuation. Email addresses that include
+    header fields are not recognized (for example,
+    ``mailto:address@example.com?cc=copy@example.com``).
+
+    :param value: Original text containing URLs to link.
+    :param trim_url_limit: Shorten displayed URL values to this length.
+    :param nofollow: Add the ``rel=nofollow`` attribute to links.
+    :param target: Add the ``target`` attribute to links.
+    :param rel: Add the ``rel`` attribute to links.
+    :param extra_schemes: Recognize URLs that start with these schemes
+        in addition to the default behavior. Defaults to
+        ``env.policies["urlize.extra_schemes"]``, which defaults to no
+        extra schemes.
+
+    .. versionchanged:: 3.0
+        The ``extra_schemes`` parameter was added.
+
+    .. versionchanged:: 3.0
+        Generate ``https://`` links for URLs without a scheme.
+
+    .. versionchanged:: 3.0
+        The parsing rules were updated. Recognize email addresses with
+        or without the ``mailto:`` scheme. Validate IP addresses. Ignore
+        parentheses and brackets in more cases.
+
+    .. versionchanged:: 2.8
+       The ``target`` parameter was added.
+    """
+    policies = eval_ctx.environment.policies
+    rel_parts = set((rel or "").split())
+
+    if nofollow:
+        rel_parts.add("nofollow")
+
+    rel_parts.update((policies["urlize.rel"] or "").split())
+    rel = " ".join(sorted(rel_parts)) or None
+
+    if target is None:
+        target = policies["urlize.target"]
+
+    if extra_schemes is None:
+        extra_schemes = policies["urlize.extra_schemes"] or ()
+
+    for scheme in extra_schemes:
+        if _uri_scheme_re.fullmatch(scheme) is None:
+            raise FilterArgumentError(f"{scheme!r} is not a valid URI scheme prefix.")
+
+    rv = urlize(
+        value,
+        trim_url_limit=trim_url_limit,
+        rel=rel,
+        target=target,
+        extra_schemes=extra_schemes,
+    )
+
+    if eval_ctx.autoescape:
+        rv = Markup(rv)
+
+    return rv
+
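+# A small sketch of do_urlize above (default policies assumed); the merged
+# ``rel`` parts end up on the generated anchor tag, expected roughly:
+#
+#   >>> from jinja2 import Environment
+#   >>> env = Environment()
+#   >>> env.from_string("{{ t|urlize(nofollow=True) }}").render(t="see https://example.com")
+#   'see <a href="https://example.com" rel="nofollow">https://example.com</a>'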
+
+def do_indent(
+    s: str, width: t.Union[int, str] = 4, first: bool = False, blank: bool = False
+) -> str:
+    """Return a copy of the string with each line indented by 4 spaces. The
+    first line and blank lines are not indented by default.
+
+    :param width: Number of spaces, or a string, to indent by.
+    :param first: Don't skip indenting the first line.
+    :param blank: Don't skip indenting empty lines.
+
+    .. versionchanged:: 3.0
+        ``width`` can be a string.
+
+    .. versionchanged:: 2.10
+        Blank lines are not indented by default.
+
+        Rename the ``indentfirst`` argument to ``first``.
+    """
+    if isinstance(width, str):
+        indention = width
+    else:
+        indention = " " * width
+
+    newline = "\n"
+
+    if isinstance(s, Markup):
+        indention = Markup(indention)
+        newline = Markup(newline)
+
+    s += newline  # this quirk is necessary for splitlines method
+
+    if blank:
+        rv = (newline + indention).join(s.splitlines())
+    else:
+        lines = s.splitlines()
+        rv = lines.pop(0)
+
+        if lines:
+            rv += newline + newline.join(
+                indention + line if line else line for line in lines
+            )
+
+    if first:
+        rv = indention + rv
+
+    return rv
+
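+# Sketch of do_indent above: by default the first line and blank lines are
+# left alone, every other line gets the indent prepended; expected:
+#
+#   >>> do_indent("a\nb\n\nc", width=2)
+#   'a\n  b\n\n  c'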
+
+@pass_environment
+def do_truncate(
+    env: "Environment",
+    s: str,
+    length: int = 255,
+    killwords: bool = False,
+    end: str = "...",
+    leeway: t.Optional[int] = None,
+) -> str:
+    """Return a truncated copy of the string. The length is specified
+    with the first parameter which defaults to ``255``. If the second
+    parameter is ``true`` the filter will cut the text at length. Otherwise
+    it will discard the last word. If the text was in fact
+    truncated it will append an ellipsis sign (``"..."``). If you want a
+    different ellipsis sign than ``"..."`` you can specify it using the
+    third parameter. Strings that only exceed the length by the tolerance
+    margin given in the fourth parameter will not be truncated.
+
+    .. sourcecode:: jinja
+
+        {{ "foo bar baz qux"|truncate(9) }}
+            -> "foo..."
+        {{ "foo bar baz qux"|truncate(9, True) }}
+            -> "foo ba..."
+        {{ "foo bar baz qux"|truncate(11) }}
+            -> "foo bar baz qux"
+        {{ "foo bar baz qux"|truncate(11, False, '...', 0) }}
+            -> "foo bar..."
+
+    The default leeway is 5 on newer Jinja versions (it was 0 before) and
+    can be reconfigured globally.
+    """
+    if leeway is None:
+        leeway = env.policies["truncate.leeway"]
+
+    assert length >= len(end), f"expected length >= {len(end)}, got {length}"
+    assert leeway >= 0, f"expected leeway >= 0, got {leeway}"
+
+    if len(s) <= length + leeway:
+        return s
+
+    if killwords:
+        return s[: length - len(end)] + end
+
+    result = s[: length - len(end)].rsplit(" ", 1)[0]
+    return result + end
+
+
+@pass_environment
+def do_wordwrap(
+    environment: "Environment",
+    s: str,
+    width: int = 79,
+    break_long_words: bool = True,
+    wrapstring: t.Optional[str] = None,
+    break_on_hyphens: bool = True,
+) -> str:
+    """Wrap a string to the given width. Existing newlines are treated
+    as paragraphs to be wrapped separately.
+
+    :param s: Original text to wrap.
+    :param width: Maximum length of wrapped lines.
+    :param break_long_words: If a word is longer than ``width``, break
+        it across lines.
+    :param break_on_hyphens: If a word contains hyphens, it may be split
+        across lines.
+    :param wrapstring: String to join each wrapped line. Defaults to
+        :attr:`Environment.newline_sequence`.
+
+    .. versionchanged:: 2.11
+        Existing newlines are treated as paragraphs wrapped separately.
+
+    .. versionchanged:: 2.11
+        Added the ``break_on_hyphens`` parameter.
+
+    .. versionchanged:: 2.7
+        Added the ``wrapstring`` parameter.
+    """
+    import textwrap
+
+    if wrapstring is None:
+        wrapstring = environment.newline_sequence
+
+    # textwrap.wrap doesn't consider existing newlines when wrapping.
+    # If the string has a newline before width, wrap will still insert
+    # a newline at width, resulting in a short line. Instead, split and
+    # wrap each paragraph individually.
+    return wrapstring.join(
+        [
+            wrapstring.join(
+                textwrap.wrap(
+                    line,
+                    width=width,
+                    expand_tabs=False,
+                    replace_whitespace=False,
+                    break_long_words=break_long_words,
+                    break_on_hyphens=break_on_hyphens,
+                )
+            )
+            for line in s.splitlines()
+        ]
+    )
+
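+# Sketch of the paragraph-aware wrapping above: each existing line is
+# wrapped on its own, so a short paragraph is never merged into the next;
+# expected with the default "\n" newline sequence:
+#
+#   >>> from jinja2 import Environment
+#   >>> Environment().from_string("{{ s|wordwrap(10) }}").render(s="aa bb\ncc dd ee ff gg")
+#   'aa bb\ncc dd ee\nff gg'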
+
+_word_re = re.compile(r"\w+")
+
+
+def do_wordcount(s: str) -> int:
+    """Count the words in that string."""
+    return len(_word_re.findall(soft_str(s)))
+
+
+def do_int(value: t.Any, default: int = 0, base: int = 10) -> int:
+    """Convert the value into an integer. If the
+    conversion doesn't work it will return ``0``. You can
+    override this default using the first parameter. You
+    can also override the default base (10) in the second
+    parameter, which handles input with prefixes such as
+    0b, 0o and 0x for bases 2, 8 and 16 respectively.
+    The base is ignored for decimal numbers and non-string values.
+    """
+    try:
+        if isinstance(value, str):
+            return int(value, base)
+
+        return int(value)
+    except (TypeError, ValueError):
+        # this quirk is necessary so that "42.23"|int gives 42.
+        try:
+            return int(float(value))
+        except (TypeError, ValueError):
+            return default
+
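+# Sketch of the conversion fallbacks in do_int above; expected:
+#
+#   >>> do_int("0x4d32", base=16)
+#   19762
+#   >>> do_int("42.23")  # str -> int fails, falls back through float()
+#   42
+#   >>> do_int("nope", default=-1)
+#   -1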
+
+def do_float(value: t.Any, default: float = 0.0) -> float:
+    """Convert the value into a floating point number. If the
+    conversion doesn't work it will return ``0.0``. You can
+    override this default using the first parameter.
+    """
+    try:
+        return float(value)
+    except (TypeError, ValueError):
+        return default
+
+
+def do_format(value: str, *args: t.Any, **kwargs: t.Any) -> str:
+    """Apply the given values to a `printf-style`_ format string, like
+    ``string % values``.
+
+    .. sourcecode:: jinja
+
+        {{ "%s, %s!"|format(greeting, name) }}
+        Hello, World!
+
+    In most cases it should be more convenient and efficient to use the
+    ``%`` operator or :meth:`str.format`.
+
+    .. code-block:: text
+
+        {{ "%s, %s!" % (greeting, name) }}
+        {{ "{}, {}!".format(greeting, name) }}
+
+    .. _printf-style: https://docs.python.org/library/stdtypes.html
+        #printf-style-string-formatting
+    """
+    if args and kwargs:
+        raise FilterArgumentError(
+            "can't handle positional and keyword arguments at the same time"
+        )
+
+    return soft_str(value) % (kwargs or args)
+
+
+def do_trim(value: str, chars: t.Optional[str] = None) -> str:
+    """Strip leading and trailing characters, by default whitespace."""
+    return soft_str(value).strip(chars)
+
+
+def do_striptags(value: "t.Union[str, HasHTML]") -> str:
+    """Strip SGML/XML tags and replace adjacent whitespace by one space."""
+    if hasattr(value, "__html__"):
+        value = t.cast("HasHTML", value).__html__()
+
+    return Markup(str(value)).striptags()
+
+
+def sync_do_slice(
+    value: "t.Collection[V]", slices: int, fill_with: "t.Optional[V]" = None
+) -> "t.Iterator[t.List[V]]":
+    """Slice an iterator and return a list of lists containing
+    those items. Useful if you want to create a div containing
+    three ul tags that represent columns:
+
+    .. sourcecode:: html+jinja
+
+        <div class="columnwrapper">
+          {%- for column in items|slice(3) %}
+            <ul class="column-{{ loop.index }}">
+            {%- for item in column %}
+              <li>{{ item }}</li>
+            {%- endfor %}
+            </ul>
+          {%- endfor %}
+        </div>
+
+    If you pass it a second argument it's used to fill missing
+    values on the last iteration.
+    """
+    seq = list(value)
+    length = len(seq)
+    items_per_slice = length // slices
+    slices_with_extra = length % slices
+    offset = 0
+
+    for slice_number in range(slices):
+        start = offset + slice_number * items_per_slice
+
+        if slice_number < slices_with_extra:
+            offset += 1
+
+        end = offset + (slice_number + 1) * items_per_slice
+        tmp = seq[start:end]
+
+        if fill_with is not None and slice_number >= slices_with_extra:
+            tmp.append(fill_with)
+
+        yield tmp
+
+
+@async_variant(sync_do_slice)  # type: ignore
+async def do_slice(
+    value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
+    slices: int,
+    fill_with: t.Optional[t.Any] = None,
+) -> "t.Iterator[t.List[V]]":
+    return sync_do_slice(await auto_to_list(value), slices, fill_with)
+
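+# Sketch of the distribution logic in sync_do_slice above: the leading
+# slices absorb the extra items, and ``fill_with`` pads the short ones;
+# expected:
+#
+#   >>> list(sync_do_slice(range(10), 3))
+#   [[0, 1, 2, 3], [4, 5, 6], [7, 8, 9]]
+#   >>> list(sync_do_slice(range(10), 3, fill_with=0))
+#   [[0, 1, 2, 3], [4, 5, 6, 0], [7, 8, 9, 0]]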
+
+def do_batch(
+    value: "t.Iterable[V]", linecount: int, fill_with: "t.Optional[V]" = None
+) -> "t.Iterator[t.List[V]]":
+    """
+    A filter that batches items. It works pretty much like `slice`,
+    just the other way round. It returns a list of lists with the
+    given number of items. If you provide a second parameter, it is
+    used to fill up missing items. See this example:
+
+    .. sourcecode:: html+jinja
+
+        <table>
+        {%- for row in items|batch(3, '&nbsp;') %}
+          <tr>
+          {%- for column in row %}
+            <td>{{ column }}</td>
+          {%- endfor %}
+          </tr>
+        {%- endfor %}
+        </table>
+    """
+    tmp: "t.List[V]" = []
+
+    for item in value:
+        if len(tmp) == linecount:
+            yield tmp
+            tmp = []
+
+        tmp.append(item)
+
+    if tmp:
+        if fill_with is not None and len(tmp) < linecount:
+            tmp += [fill_with] * (linecount - len(tmp))
+
+        yield tmp
+
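+# Sketch of do_batch above, the row-wise counterpart of slice; expected:
+#
+#   >>> list(do_batch(range(7), 3, fill_with=0))
+#   [[0, 1, 2], [3, 4, 5], [6, 0, 0]]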
+
+def do_round(
+    value: float,
+    precision: int = 0,
+    method: 'te.Literal["common", "ceil", "floor"]' = "common",
+) -> float:
+    """Round the number to a given precision. The first
+    parameter specifies the precision (default is ``0``), the
+    second the rounding method:
+
+    - ``'common'`` rounds either up or down
+    - ``'ceil'`` always rounds up
+    - ``'floor'`` always rounds down
+
+    If you don't specify a method ``'common'`` is used.
+
+    .. sourcecode:: jinja
+
+        {{ 42.55|round }}
+            -> 43.0
+        {{ 42.55|round(1, 'floor') }}
+            -> 42.5
+
+    Note that even if rounded to 0 precision, a float is returned.  If
+    you need a real integer, pipe it through `int`:
+
+    .. sourcecode:: jinja
+
+        {{ 42.55|round|int }}
+            -> 43
+    """
+    if method not in {"common", "ceil", "floor"}:
+        raise FilterArgumentError("method must be common, ceil or floor")
+
+    if method == "common":
+        return round(value, precision)
+
+    func = getattr(math, method)
+    return t.cast(float, func(value * (10**precision)) / (10**precision))
+
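+# Sketch of the scaling trick in do_round above: for 'ceil'/'floor' the
+# value is shifted by 10**precision, truncated, and shifted back; expected:
+#
+#   >>> do_round(42.55)
+#   43.0
+#   >>> do_round(42.55, 1, "floor")
+#   42.5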
+
+class _GroupTuple(t.NamedTuple):
+    grouper: t.Any
+    list: t.List
+
+    # Use the regular tuple repr to hide this subclass if users print
+    # out the value during debugging.
+    def __repr__(self) -> str:
+        return tuple.__repr__(self)
+
+    def __str__(self) -> str:
+        return tuple.__str__(self)
+
+
+@pass_environment
+def sync_do_groupby(
+    environment: "Environment",
+    value: "t.Iterable[V]",
+    attribute: t.Union[str, int],
+    default: t.Optional[t.Any] = None,
+    case_sensitive: bool = False,
+) -> "t.List[_GroupTuple]":
+    """Group a sequence of objects by an attribute using Python's
+    :func:`itertools.groupby`. The attribute can use dot notation for
+    nested access, like ``"address.city"``. Unlike Python's ``groupby``,
+    the values are sorted first so only one group is returned for each
+    unique value.
+
+    For example, a list of ``User`` objects with a ``city`` attribute
+    can be rendered in groups. In this example, ``grouper`` refers to
+    the ``city`` value of the group.
+
+    .. sourcecode:: html+jinja
+
+        <ul>{% for city, items in users|groupby("city") %}
+          <li>{{ city }}
+            <ul>{% for user in items %}
+              <li>{{ user.name }}
+            {% endfor %}</ul>
+          </li>
+        {% endfor %}</ul>
+
+    ``groupby`` yields namedtuples of ``(grouper, list)``, which
+    can be used instead of the tuple unpacking above. ``grouper`` is the
+    value of the attribute, and ``list`` is the items with that value.
+
+    .. sourcecode:: html+jinja
+
+        <ul>{% for group in users|groupby("city") %}
+          <li>{{ group.grouper }}: {{ group.list|join(", ") }}
+        {% endfor %}</ul>
+
+    You can specify a ``default`` value to use if an object in the list
+    does not have the given attribute.
+
+    .. sourcecode:: jinja
+
+        <ul>{% for city, items in users|groupby("city", default="NY") %}
+          <li>{{ city }}: {{ items|map(attribute="name")|join(", ") }}</li>
+        {% endfor %}</ul>
+
+    Like the :func:`~jinja-filters.sort` filter, sorting and grouping is
+    case-insensitive by default. The ``key`` for each group will have
+    the case of the first item in that group of values. For example, if
+    a list of users has cities ``["CA", "NY", "ca"]``, the "CA" group
+    will have two values. This can be disabled by passing
+    ``case_sensitive=True``.
+
+    .. versionchanged:: 3.1
+        Added the ``case_sensitive`` parameter. Sorting and grouping is
+        case-insensitive by default, matching other filters that do
+        comparisons.
+
+    .. versionchanged:: 3.0
+        Added the ``default`` parameter.
+
+    .. versionchanged:: 2.6
+        The attribute supports dot notation for nested access.
+    """
+    expr = make_attrgetter(
+        environment,
+        attribute,
+        postprocess=ignore_case if not case_sensitive else None,
+        default=default,
+    )
+    out = [
+        _GroupTuple(key, list(values))
+        for key, values in groupby(sorted(value, key=expr), expr)
+    ]
+
+    if not case_sensitive:
+        # Return the real key from the first value instead of the lowercase key.
+        output_expr = make_attrgetter(environment, attribute, default=default)
+        out = [_GroupTuple(output_expr(values[0]), values) for _, values in out]
+
+    return out
+
+
+@async_variant(sync_do_groupby)  # type: ignore
+async def do_groupby(
+    environment: "Environment",
+    value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
+    attribute: t.Union[str, int],
+    default: t.Optional[t.Any] = None,
+    case_sensitive: bool = False,
+) -> "t.List[_GroupTuple]":
+    expr = make_attrgetter(
+        environment,
+        attribute,
+        postprocess=ignore_case if not case_sensitive else None,
+        default=default,
+    )
+    out = [
+        _GroupTuple(key, await auto_to_list(values))
+        for key, values in groupby(sorted(await auto_to_list(value), key=expr), expr)
+    ]
+
+    if not case_sensitive:
+        # Return the real key from the first value instead of the lowercase key.
+        output_expr = make_attrgetter(environment, attribute, default=default)
+        out = [_GroupTuple(output_expr(values[0]), values) for _, values in out]
+
+    return out
+
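+# Sketch of the case-insensitive grouping above, using plain dicts (the
+# attrgetter resolves items as well as attributes); the group key keeps
+# the case of the first matching item, so the expected render is:
+#
+#   >>> from jinja2 import Environment
+#   >>> tmpl = Environment().from_string(
+#   ...     '{% for g in users|groupby("city") %}{{ g.grouper }}={{ g.list|length }} {% endfor %}'
+#   ... )
+#   >>> tmpl.render(users=[{"city": "NY"}, {"city": "ca"}, {"city": "CA"}])
+#   'ca=2 NY=1 '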
+
+@pass_environment
+def sync_do_sum(
+    environment: "Environment",
+    iterable: "t.Iterable[V]",
+    attribute: t.Optional[t.Union[str, int]] = None,
+    start: V = 0,  # type: ignore
+) -> V:
+    """Returns the sum of a sequence of numbers plus the value of parameter
+    'start' (which defaults to 0).  When the sequence is empty it returns
+    start.
+
+    It is also possible to sum up only certain attributes:
+
+    .. sourcecode:: jinja
+
+        Total: {{ items|sum(attribute='price') }}
+
+    .. versionchanged:: 2.6
+       The ``attribute`` parameter was added to allow summing up over
+       attributes.  Also the ``start`` parameter was moved on to the right.
+    """
+    if attribute is not None:
+        iterable = map(make_attrgetter(environment, attribute), iterable)
+
+    return sum(iterable, start)  # type: ignore[no-any-return, call-overload]
+
+
+@async_variant(sync_do_sum)  # type: ignore
+async def do_sum(
+    environment: "Environment",
+    iterable: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
+    attribute: t.Optional[t.Union[str, int]] = None,
+    start: V = 0,  # type: ignore
+) -> V:
+    rv = start
+
+    if attribute is not None:
+        func = make_attrgetter(environment, attribute)
+    else:
+
+        def func(x: V) -> V:
+            return x
+
+    async for item in auto_aiter(iterable):
+        rv += func(item)
+
+    return rv
+
+
+def sync_do_list(value: "t.Iterable[V]") -> "t.List[V]":
+    """Convert the value into a list.  If it was a string the returned list
+    will be a list of characters.
+    """
+    return list(value)
+
+
+@async_variant(sync_do_list)  # type: ignore
+async def do_list(value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]") -> "t.List[V]":
+    return await auto_to_list(value)
+
+
+def do_mark_safe(value: str) -> Markup:
+    """Mark the value as safe which means that in an environment with automatic
+    escaping enabled this variable will not be escaped.
+    """
+    return Markup(value)
+
+
+def do_mark_unsafe(value: str) -> str:
+    """Mark a value as unsafe.  This is the reverse operation for :func:`safe`."""
+    return str(value)
+
+
+@typing.overload
+def do_reverse(value: str) -> str:
+    ...
+
+
+@typing.overload
+def do_reverse(value: "t.Iterable[V]") -> "t.Iterable[V]":
+    ...
+
+
+def do_reverse(value: t.Union[str, t.Iterable[V]]) -> t.Union[str, t.Iterable[V]]:
+    """Reverse the object or return an iterator that iterates over it the other
+    way round.
+    """
+    if isinstance(value, str):
+        return value[::-1]
+
+    try:
+        return reversed(value)  # type: ignore
+    except TypeError:
+        try:
+            rv = list(value)
+            rv.reverse()
+            return rv
+        except TypeError as e:
+            raise FilterArgumentError("argument must be iterable") from e
+
+
+@pass_environment
+def do_attr(
+    environment: "Environment", obj: t.Any, name: str
+) -> t.Union[Undefined, t.Any]:
+    """Get an attribute of an object.  ``foo|attr("bar")`` works like
+    ``foo.bar``, except that an attribute is always returned and items
+    are not looked up.
+
+    See :ref:`Notes on subscriptions <notes-on-subscriptions>` for more details.
+    """
+    try:
+        name = str(name)
+    except UnicodeError:
+        pass
+    else:
+        try:
+            value = getattr(obj, name)
+        except AttributeError:
+            pass
+        else:
+            if environment.sandboxed:
+                environment = t.cast("SandboxedEnvironment", environment)
+
+                if not environment.is_safe_attribute(obj, name, value):
+                    return environment.unsafe_undefined(obj, name)
+
+            return value
+
+    return environment.undefined(obj=obj, name=name)
+
+
+@typing.overload
+def sync_do_map(
+    context: "Context", value: t.Iterable, name: str, *args: t.Any, **kwargs: t.Any
+) -> t.Iterable:
+    ...
+
+
+@typing.overload
+def sync_do_map(
+    context: "Context",
+    value: t.Iterable,
+    *,
+    attribute: str = ...,
+    default: t.Optional[t.Any] = None,
+) -> t.Iterable:
+    ...
+
+
+@pass_context
+def sync_do_map(
+    context: "Context", value: t.Iterable, *args: t.Any, **kwargs: t.Any
+) -> t.Iterable:
+    """Applies a filter on a sequence of objects or looks up an attribute.
+    This is useful when dealing with lists of objects where you are
+    really only interested in a certain value of each one.
+
+    The basic usage is mapping on an attribute.  Imagine you have a list
+    of users but you are only interested in a list of usernames:
+
+    .. sourcecode:: jinja
+
+        Users on this page: {{ users|map(attribute='username')|join(', ') }}
+
+    You can specify a ``default`` value to use if an object in the list
+    does not have the given attribute.
+
+    .. sourcecode:: jinja
+
+        {{ users|map(attribute="username", default="Anonymous")|join(", ") }}
+
+    Alternatively you can let it invoke a filter by passing the name of the
+    filter and the arguments afterwards.  A good example would be applying a
+    text conversion filter on a sequence:
+
+    .. sourcecode:: jinja
+
+        Users on this page: {{ titles|map('lower')|join(', ') }}
+
+    Similar to a generator comprehension such as:
+
+    .. code-block:: python
+
+        (u.username for u in users)
+        (getattr(u, "username", "Anonymous") for u in users)
+        (do_lower(x) for x in titles)
+
+    .. versionchanged:: 2.11.0
+        Added the ``default`` parameter.
+
+    .. versionadded:: 2.7
+    """
+    if value:
+        func = prepare_map(context, args, kwargs)
+
+        for item in value:
+            yield func(item)
+
+
+@typing.overload
+def do_map(
+    context: "Context",
+    value: t.Union[t.AsyncIterable, t.Iterable],
+    name: str,
+    *args: t.Any,
+    **kwargs: t.Any,
+) -> t.Iterable:
+    ...
+
+
+@typing.overload
+def do_map(
+    context: "Context",
+    value: t.Union[t.AsyncIterable, t.Iterable],
+    *,
+    attribute: str = ...,
+    default: t.Optional[t.Any] = None,
+) -> t.Iterable:
+    ...
+
+
+@async_variant(sync_do_map)  # type: ignore
+async def do_map(
+    context: "Context",
+    value: t.Union[t.AsyncIterable, t.Iterable],
+    *args: t.Any,
+    **kwargs: t.Any,
+) -> t.AsyncIterable:
+    if value:
+        func = prepare_map(context, args, kwargs)
+
+        async for item in auto_aiter(value):
+            yield await auto_await(func(item))
+
+
+@pass_context
+def sync_do_select(
+    context: "Context", value: "t.Iterable[V]", *args: t.Any, **kwargs: t.Any
+) -> "t.Iterator[V]":
+    """Filters a sequence of objects by applying a test to each object,
+    and only selecting the objects with the test succeeding.
+
+    If no test is specified, each object will be evaluated as a boolean.
+
+    Example usage:
+
+    .. sourcecode:: jinja
+
+        {{ numbers|select("odd") }}
+        {{ numbers|select("divisibleby", 3) }}
+        {{ numbers|select("lessthan", 42) }}
+        {{ strings|select("equalto", "mystring") }}
+
+    Similar to a generator comprehension such as:
+
+    .. code-block:: python
+
+        (n for n in numbers if test_odd(n))
+        (n for n in numbers if test_divisibleby(n, 3))
+
+    .. versionadded:: 2.7
+    """
+    return select_or_reject(context, value, args, kwargs, lambda x: x, False)
+
+
+@async_variant(sync_do_select)  # type: ignore
+async def do_select(
+    context: "Context",
+    value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
+    *args: t.Any,
+    **kwargs: t.Any,
+) -> "t.AsyncIterator[V]":
+    return async_select_or_reject(context, value, args, kwargs, lambda x: x, False)
+
+
+@pass_context
+def sync_do_reject(
+    context: "Context", value: "t.Iterable[V]", *args: t.Any, **kwargs: t.Any
+) -> "t.Iterator[V]":
+    """Filters a sequence of objects by applying a test to each object,
+    and rejecting the objects with the test succeeding.
+
+    If no test is specified, each object will be evaluated as a boolean.
+
+    Example usage:
+
+    .. sourcecode:: jinja
+
+        {{ numbers|reject("odd") }}
+
+    Similar to a generator comprehension such as:
+
+    .. code-block:: python
+
+        (n for n in numbers if not test_odd(n))
+
+    .. versionadded:: 2.7
+    """
+    return select_or_reject(context, value, args, kwargs, lambda x: not x, False)
+
+
+@async_variant(sync_do_reject)  # type: ignore
+async def do_reject(
+    context: "Context",
+    value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
+    *args: t.Any,
+    **kwargs: t.Any,
+) -> "t.AsyncIterator[V]":
+    return async_select_or_reject(context, value, args, kwargs, lambda x: not x, False)
+
+
+@pass_context
+def sync_do_selectattr(
+    context: "Context", value: "t.Iterable[V]", *args: t.Any, **kwargs: t.Any
+) -> "t.Iterator[V]":
+    """Filters a sequence of objects by applying a test to the specified
+    attribute of each object, and only selecting the objects with the
+    test succeeding.
+
+    If no test is specified, the attribute's value will be evaluated as
+    a boolean.
+
+    Example usage:
+
+    .. sourcecode:: jinja
+
+        {{ users|selectattr("is_active") }}
+        {{ users|selectattr("email", "none") }}
+
+    Similar to a generator comprehension such as:
+
+    .. code-block:: python
+
+        (user for user in users if user.is_active)
+        (user for user in users if test_none(user.email))
+
+    .. versionadded:: 2.7
+    """
+    return select_or_reject(context, value, args, kwargs, lambda x: x, True)
+
+
+@async_variant(sync_do_selectattr)  # type: ignore
+async def do_selectattr(
+    context: "Context",
+    value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
+    *args: t.Any,
+    **kwargs: t.Any,
+) -> "t.AsyncIterator[V]":
+    return async_select_or_reject(context, value, args, kwargs, lambda x: x, True)
+
+
+@pass_context
+def sync_do_rejectattr(
+    context: "Context", value: "t.Iterable[V]", *args: t.Any, **kwargs: t.Any
+) -> "t.Iterator[V]":
+    """Filters a sequence of objects by applying a test to the specified
+    attribute of each object, and rejecting the objects with the test
+    succeeding.
+
+    If no test is specified, the attribute's value will be evaluated as
+    a boolean.
+
+    .. sourcecode:: jinja
+
+        {{ users|rejectattr("is_active") }}
+        {{ users|rejectattr("email", "none") }}
+
+    Similar to a generator comprehension such as:
+
+    .. code-block:: python
+
+        (user for user in users if not user.is_active)
+        (user for user in users if not test_none(user.email))
+
+    .. versionadded:: 2.7
+    """
+    return select_or_reject(context, value, args, kwargs, lambda x: not x, True)
+
+
+@async_variant(sync_do_rejectattr)  # type: ignore
+async def do_rejectattr(
+    context: "Context",
+    value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
+    *args: t.Any,
+    **kwargs: t.Any,
+) -> "t.AsyncIterator[V]":
+    return async_select_or_reject(context, value, args, kwargs, lambda x: not x, True)
+
+
+@pass_eval_context
+def do_tojson(
+    eval_ctx: "EvalContext", value: t.Any, indent: t.Optional[int] = None
+) -> Markup:
+    """Serialize an object to a string of JSON, and mark it safe to
+    render in HTML. This filter is only for use in HTML documents.
+
+    The returned string is safe to render in HTML documents and
+    ``<script>`` tags. The exception is in HTML attributes that are
+    double quoted; either use single quotes or the ``|forceescape``
+    filter.
+
+    :param value: The object to serialize to JSON.
+    :param indent: The ``indent`` parameter passed to ``dumps``, for
+        pretty-printing the value.
+
+    .. versionadded:: 2.9
+    """
+    policies = eval_ctx.environment.policies
+    dumps = policies["json.dumps_function"]
+    kwargs = policies["json.dumps_kwargs"]
+
+    if indent is not None:
+        kwargs = kwargs.copy()
+        kwargs["indent"] = indent
+
+    return htmlsafe_json_dumps(value, dumps=dumps, **kwargs)
+
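+# Sketch of the HTML-safe serialization above: characters meaningful in
+# HTML are escaped as unicode sequences rather than entities; expected:
+#
+#   >>> from jinja2 import Environment
+#   >>> Environment().from_string("{{ d|tojson }}").render(d={"msg": "<hi>"})
+#   '{"msg": "\\u003chi\\u003e"}'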
+
+def prepare_map(
+    context: "Context", args: t.Tuple, kwargs: t.Dict[str, t.Any]
+) -> t.Callable[[t.Any], t.Any]:
+    if not args and "attribute" in kwargs:
+        attribute = kwargs.pop("attribute")
+        default = kwargs.pop("default", None)
+
+        if kwargs:
+            raise FilterArgumentError(
+                f"Unexpected keyword argument {next(iter(kwargs))!r}"
+            )
+
+        func = make_attrgetter(context.environment, attribute, default=default)
+    else:
+        try:
+            name = args[0]
+            args = args[1:]
+        except LookupError:
+            raise FilterArgumentError("map requires a filter argument") from None
+
+        def func(item: t.Any) -> t.Any:
+            return context.environment.call_filter(
+                name, item, args, kwargs, context=context
+            )
+
+    return func
+
+
+def prepare_select_or_reject(
+    context: "Context",
+    args: t.Tuple,
+    kwargs: t.Dict[str, t.Any],
+    modfunc: t.Callable[[t.Any], t.Any],
+    lookup_attr: bool,
+) -> t.Callable[[t.Any], t.Any]:
+    if lookup_attr:
+        try:
+            attr = args[0]
+        except LookupError:
+            raise FilterArgumentError("Missing parameter for attribute name") from None
+
+        transfunc = make_attrgetter(context.environment, attr)
+        off = 1
+    else:
+        off = 0
+
+        def transfunc(x: V) -> V:
+            return x
+
+    try:
+        name = args[off]
+        args = args[1 + off :]
+
+        def func(item: t.Any) -> t.Any:
+            return context.environment.call_test(name, item, args, kwargs)
+
+    except LookupError:
+        func = bool  # type: ignore
+
+    return lambda item: modfunc(func(transfunc(item)))
+
+
+def select_or_reject(
+    context: "Context",
+    value: "t.Iterable[V]",
+    args: t.Tuple,
+    kwargs: t.Dict[str, t.Any],
+    modfunc: t.Callable[[t.Any], t.Any],
+    lookup_attr: bool,
+) -> "t.Iterator[V]":
+    if value:
+        func = prepare_select_or_reject(context, args, kwargs, modfunc, lookup_attr)
+
+        for item in value:
+            if func(item):
+                yield item
+
+
+async def async_select_or_reject(
+    context: "Context",
+    value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
+    args: t.Tuple,
+    kwargs: t.Dict[str, t.Any],
+    modfunc: t.Callable[[t.Any], t.Any],
+    lookup_attr: bool,
+) -> "t.AsyncIterator[V]":
+    if value:
+        func = prepare_select_or_reject(context, args, kwargs, modfunc, lookup_attr)
+
+        async for item in auto_aiter(value):
+            if func(item):
+                yield item
+
+
+FILTERS = {
+    "abs": abs,
+    "attr": do_attr,
+    "batch": do_batch,
+    "capitalize": do_capitalize,
+    "center": do_center,
+    "count": len,
+    "d": do_default,
+    "default": do_default,
+    "dictsort": do_dictsort,
+    "e": escape,
+    "escape": escape,
+    "filesizeformat": do_filesizeformat,
+    "first": do_first,
+    "float": do_float,
+    "forceescape": do_forceescape,
+    "format": do_format,
+    "groupby": do_groupby,
+    "indent": do_indent,
+    "int": do_int,
+    "join": do_join,
+    "last": do_last,
+    "length": len,
+    "list": do_list,
+    "lower": do_lower,
+    "items": do_items,
+    "map": do_map,
+    "min": do_min,
+    "max": do_max,
+    "pprint": do_pprint,
+    "random": do_random,
+    "reject": do_reject,
+    "rejectattr": do_rejectattr,
+    "replace": do_replace,
+    "reverse": do_reverse,
+    "round": do_round,
+    "safe": do_mark_safe,
+    "select": do_select,
+    "selectattr": do_selectattr,
+    "slice": do_slice,
+    "sort": do_sort,
+    "string": soft_str,
+    "striptags": do_striptags,
+    "sum": do_sum,
+    "title": do_title,
+    "trim": do_trim,
+    "truncate": do_truncate,
+    "unique": do_unique,
+    "upper": do_upper,
+    "urlencode": do_urlencode,
+    "urlize": do_urlize,
+    "wordcount": do_wordcount,
+    "wordwrap": do_wordwrap,
+    "xmlattr": do_xmlattr,
+    "tojson": do_tojson,
+}
diff --git a/venv/lib/python3.9/site-packages/jinja2/idtracking.py b/venv/lib/python3.9/site-packages/jinja2/idtracking.py
new file mode 100644
index 0000000..995ebaa
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/idtracking.py
@@ -0,0 +1,318 @@
+import typing as t
+
+from . import nodes
+from .visitor import NodeVisitor
+
+VAR_LOAD_PARAMETER = "param"
+VAR_LOAD_RESOLVE = "resolve"
+VAR_LOAD_ALIAS = "alias"
+VAR_LOAD_UNDEFINED = "undefined"
+
+
+def find_symbols(
+    nodes: t.Iterable[nodes.Node], parent_symbols: t.Optional["Symbols"] = None
+) -> "Symbols":
+    sym = Symbols(parent=parent_symbols)
+    visitor = FrameSymbolVisitor(sym)
+    for node in nodes:
+        visitor.visit(node)
+    return sym
+
+
+def symbols_for_node(
+    node: nodes.Node, parent_symbols: t.Optional["Symbols"] = None
+) -> "Symbols":
+    sym = Symbols(parent=parent_symbols)
+    sym.analyze_node(node)
+    return sym
+
+
+class Symbols:
+    def __init__(
+        self, parent: t.Optional["Symbols"] = None, level: t.Optional[int] = None
+    ) -> None:
+        if level is None:
+            if parent is None:
+                level = 0
+            else:
+                level = parent.level + 1
+
+        self.level: int = level
+        self.parent = parent
+        self.refs: t.Dict[str, str] = {}
+        self.loads: t.Dict[str, t.Any] = {}
+        self.stores: t.Set[str] = set()
+
+    def analyze_node(self, node: nodes.Node, **kwargs: t.Any) -> None:
+        visitor = RootVisitor(self)
+        visitor.visit(node, **kwargs)
+
+    def _define_ref(
+        self, name: str, load: t.Optional[t.Tuple[str, t.Optional[str]]] = None
+    ) -> str:
+        ident = f"l_{self.level}_{name}"
+        self.refs[name] = ident
+        if load is not None:
+            self.loads[ident] = load
+        return ident
+
+    def find_load(self, target: str) -> t.Optional[t.Any]:
+        if target in self.loads:
+            return self.loads[target]
+
+        if self.parent is not None:
+            return self.parent.find_load(target)
+
+        return None
+
+    def find_ref(self, name: str) -> t.Optional[str]:
+        if name in self.refs:
+            return self.refs[name]
+
+        if self.parent is not None:
+            return self.parent.find_ref(name)
+
+        return None
+
+    def ref(self, name: str) -> str:
+        rv = self.find_ref(name)
+        if rv is None:
+            raise AssertionError(
+                "Tried to resolve a name to a reference that was"
+                f" unknown to the frame ({name!r})"
+            )
+        return rv
+
+    def copy(self) -> "Symbols":
+        rv = object.__new__(self.__class__)
+        rv.__dict__.update(self.__dict__)
+        rv.refs = self.refs.copy()
+        rv.loads = self.loads.copy()
+        rv.stores = self.stores.copy()
+        return rv
+
+    def store(self, name: str) -> None:
+        self.stores.add(name)
+
+        # If we have not seen the name referenced yet, we need to figure
+        # out what to set it to.
+        if name not in self.refs:
+            # If there is a parent scope we check if the name has a
+            # reference there.  If it does it means we might have to alias
+            # to a variable there.
+            if self.parent is not None:
+                outer_ref = self.parent.find_ref(name)
+                if outer_ref is not None:
+                    self._define_ref(name, load=(VAR_LOAD_ALIAS, outer_ref))
+                    return
+
+            # Otherwise we can just set it to undefined.
+            self._define_ref(name, load=(VAR_LOAD_UNDEFINED, None))
+
+    def declare_parameter(self, name: str) -> str:
+        self.stores.add(name)
+        return self._define_ref(name, load=(VAR_LOAD_PARAMETER, None))
+
+    def load(self, name: str) -> None:
+        if self.find_ref(name) is None:
+            self._define_ref(name, load=(VAR_LOAD_RESOLVE, name))
+
+    def branch_update(self, branch_symbols: t.Sequence["Symbols"]) -> None:
+        stores: t.Dict[str, int] = {}
+        for branch in branch_symbols:
+            for target in branch.stores:
+                if target in self.stores:
+                    continue
+                stores[target] = stores.get(target, 0) + 1
+
+        for sym in branch_symbols:
+            self.refs.update(sym.refs)
+            self.loads.update(sym.loads)
+            self.stores.update(sym.stores)
+
+        for name, branch_count in stores.items():
+            if branch_count == len(branch_symbols):
+                continue
+
+            target = self.find_ref(name)  # type: ignore
+            assert target is not None, "should not happen"
+
+            if self.parent is not None:
+                outer_target = self.parent.find_ref(name)
+                if outer_target is not None:
+                    self.loads[target] = (VAR_LOAD_ALIAS, outer_target)
+                    continue
+            self.loads[target] = (VAR_LOAD_RESOLVE, name)
+
+    def dump_stores(self) -> t.Dict[str, str]:
+        rv: t.Dict[str, str] = {}
+        node: t.Optional["Symbols"] = self
+
+        while node is not None:
+            for name in sorted(node.stores):
+                if name not in rv:
+                    rv[name] = self.find_ref(name)  # type: ignore
+
+            node = node.parent
+
+        return rv
+
+    def dump_param_targets(self) -> t.Set[str]:
+        rv = set()
+        node: t.Optional["Symbols"] = self
+
+        while node is not None:
+            # Read each scope's own loads while walking up the parents.
+            for target, (instr, _) in node.loads.items():
+                if instr == VAR_LOAD_PARAMETER:
+                    rv.add(target)
+
+            node = node.parent
+
+        return rv
+
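+# Sketch of the symbol table this class builds for a tiny template; the
+# exact idents assume the usual parse tree, so expected roughly:
+#
+#   >>> from jinja2 import Environment
+#   >>> ast = Environment().parse("{% set x = 1 %}{{ x }}{{ y }}")
+#   >>> sym = symbols_for_node(ast)
+#   >>> sym.refs
+#   {'x': 'l_0_x', 'y': 'l_0_y'}
+#   >>> sym.loads
+#   {'l_0_x': ('undefined', None), 'l_0_y': ('resolve', 'y')}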
+
+class RootVisitor(NodeVisitor):
+    def __init__(self, symbols: "Symbols") -> None:
+        self.sym_visitor = FrameSymbolVisitor(symbols)
+
+    def _simple_visit(self, node: nodes.Node, **kwargs: t.Any) -> None:
+        for child in node.iter_child_nodes():
+            self.sym_visitor.visit(child)
+
+    visit_Template = _simple_visit
+    visit_Block = _simple_visit
+    visit_Macro = _simple_visit
+    visit_FilterBlock = _simple_visit
+    visit_Scope = _simple_visit
+    visit_If = _simple_visit
+    visit_ScopedEvalContextModifier = _simple_visit
+
+    def visit_AssignBlock(self, node: nodes.AssignBlock, **kwargs: t.Any) -> None:
+        for child in node.body:
+            self.sym_visitor.visit(child)
+
+    def visit_CallBlock(self, node: nodes.CallBlock, **kwargs: t.Any) -> None:
+        for child in node.iter_child_nodes(exclude=("call",)):
+            self.sym_visitor.visit(child)
+
+    def visit_OverlayScope(self, node: nodes.OverlayScope, **kwargs: t.Any) -> None:
+        for child in node.body:
+            self.sym_visitor.visit(child)
+
+    def visit_For(
+        self, node: nodes.For, for_branch: str = "body", **kwargs: t.Any
+    ) -> None:
+        if for_branch == "body":
+            self.sym_visitor.visit(node.target, store_as_param=True)
+            branch = node.body
+        elif for_branch == "else":
+            branch = node.else_
+        elif for_branch == "test":
+            self.sym_visitor.visit(node.target, store_as_param=True)
+            if node.test is not None:
+                self.sym_visitor.visit(node.test)
+            return
+        else:
+            raise RuntimeError("Unknown for branch")
+
+        if branch:
+            for item in branch:
+                self.sym_visitor.visit(item)
+
+    def visit_With(self, node: nodes.With, **kwargs: t.Any) -> None:
+        for target in node.targets:
+            self.sym_visitor.visit(target)
+        for child in node.body:
+            self.sym_visitor.visit(child)
+
+    def generic_visit(self, node: nodes.Node, *args: t.Any, **kwargs: t.Any) -> None:
+        raise NotImplementedError(f"Cannot find symbols for {type(node).__name__!r}")
+
+
+class FrameSymbolVisitor(NodeVisitor):
+    """A visitor for `Frame.inspect`."""
+
+    def __init__(self, symbols: "Symbols") -> None:
+        self.symbols = symbols
+
+    def visit_Name(
+        self, node: nodes.Name, store_as_param: bool = False, **kwargs: t.Any
+    ) -> None:
+        """All assignments to names go through this function."""
+        if store_as_param or node.ctx == "param":
+            self.symbols.declare_parameter(node.name)
+        elif node.ctx == "store":
+            self.symbols.store(node.name)
+        elif node.ctx == "load":
+            self.symbols.load(node.name)
+
+    def visit_NSRef(self, node: nodes.NSRef, **kwargs: t.Any) -> None:
+        self.symbols.load(node.name)
+
+    def visit_If(self, node: nodes.If, **kwargs: t.Any) -> None:
+        self.visit(node.test, **kwargs)
+        original_symbols = self.symbols
+
+        def inner_visit(nodes: t.Iterable[nodes.Node]) -> "Symbols":
+            self.symbols = rv = original_symbols.copy()
+
+            for subnode in nodes:
+                self.visit(subnode, **kwargs)
+
+            self.symbols = original_symbols
+            return rv
+
+        body_symbols = inner_visit(node.body)
+        elif_symbols = inner_visit(node.elif_)
+        else_symbols = inner_visit(node.else_ or ())
+        self.symbols.branch_update([body_symbols, elif_symbols, else_symbols])
+
+    def visit_Macro(self, node: nodes.Macro, **kwargs: t.Any) -> None:
+        self.symbols.store(node.name)
+
+    def visit_Import(self, node: nodes.Import, **kwargs: t.Any) -> None:
+        self.generic_visit(node, **kwargs)
+        self.symbols.store(node.target)
+
+    def visit_FromImport(self, node: nodes.FromImport, **kwargs: t.Any) -> None:
+        self.generic_visit(node, **kwargs)
+
+        for name in node.names:
+            if isinstance(name, tuple):
+                self.symbols.store(name[1])
+            else:
+                self.symbols.store(name)
+
+    def visit_Assign(self, node: nodes.Assign, **kwargs: t.Any) -> None:
+        """Visit assignments in the correct order."""
+        self.visit(node.node, **kwargs)
+        self.visit(node.target, **kwargs)
+
+    def visit_For(self, node: nodes.For, **kwargs: t.Any) -> None:
+        """Visiting stops at for blocks.  However the block sequence
+        is visited as part of the outer scope.
+        """
+        self.visit(node.iter, **kwargs)
+
+    def visit_CallBlock(self, node: nodes.CallBlock, **kwargs: t.Any) -> None:
+        self.visit(node.call, **kwargs)
+
+    def visit_FilterBlock(self, node: nodes.FilterBlock, **kwargs: t.Any) -> None:
+        self.visit(node.filter, **kwargs)
+
+    def visit_With(self, node: nodes.With, **kwargs: t.Any) -> None:
+        for target in node.values:
+            self.visit(target)
+
+    def visit_AssignBlock(self, node: nodes.AssignBlock, **kwargs: t.Any) -> None:
+        """Stop visiting at block assigns."""
+        self.visit(node.target, **kwargs)
+
+    def visit_Scope(self, node: nodes.Scope, **kwargs: t.Any) -> None:
+        """Stop visiting at scopes."""
+
+    def visit_Block(self, node: nodes.Block, **kwargs: t.Any) -> None:
+        """Stop visiting at blocks."""
+
+    def visit_OverlayScope(self, node: nodes.OverlayScope, **kwargs: t.Any) -> None:
+        """Do not visit into overlay scopes."""
diff --git a/venv/lib/python3.9/site-packages/jinja2/lexer.py b/venv/lib/python3.9/site-packages/jinja2/lexer.py
new file mode 100644
index 0000000..aff7e9f
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/lexer.py
@@ -0,0 +1,866 @@
+"""Implements a Jinja / Python combination lexer. The ``Lexer`` class
+is used to do some preprocessing. It filters out invalid operators like
+the bitshift operators we don't allow in templates. It separates
+template code and python code in expressions.
+"""
+import re
+import typing as t
+from ast import literal_eval
+from collections import deque
+from sys import intern
+
+from ._identifier import pattern as name_re
+from .exceptions import TemplateSyntaxError
+from .utils import LRUCache
+
+if t.TYPE_CHECKING:
+    import typing_extensions as te
+    from .environment import Environment
+
+# cache for the lexers. Exists in order to be able to have multiple
+# environments with the same lexer
+_lexer_cache: t.MutableMapping[t.Tuple, "Lexer"] = LRUCache(50)  # type: ignore
+
+# static regular expressions
+whitespace_re = re.compile(r"\s+")
+newline_re = re.compile(r"(\r\n|\r|\n)")
+string_re = re.compile(
+    r"('([^'\\]*(?:\\.[^'\\]*)*)'" r'|"([^"\\]*(?:\\.[^"\\]*)*)")', re.S
+)
+integer_re = re.compile(
+    r"""
+    (
+        0b(_?[0-1])+ # binary
+    |
+        0o(_?[0-7])+ # octal
+    |
+        0x(_?[\da-f])+ # hex
+    |
+        [1-9](_?\d)* # decimal
+    |
+        0(_?0)* # decimal zero
+    )
+    """,
+    re.IGNORECASE | re.VERBOSE,
+)
+float_re = re.compile(
+    r"""
+    (?<!\.)  # doesn't start with a .
+    (\d+_)*\d+  # digits, possibly _ separated
+    (
+        (\.(\d+_)*\d+)?  # optional fractional part
+        e[+\-]?(\d+_)*\d+  # exponent part
+    |
+        \.(\d+_)*\d+  # required fractional part
+    )
+    """,
+    re.IGNORECASE | re.VERBOSE,
+)
+
+# intern the token strings and keep references to them
+TOKEN_ADD = intern("add")
+TOKEN_ASSIGN = intern("assign")
+TOKEN_COLON = intern("colon")
+TOKEN_COMMA = intern("comma")
+TOKEN_DIV = intern("div")
+TOKEN_DOT = intern("dot")
+TOKEN_EQ = intern("eq")
+TOKEN_FLOORDIV = intern("floordiv")
+TOKEN_GT = intern("gt")
+TOKEN_GTEQ = intern("gteq")
+TOKEN_LBRACE = intern("lbrace")
+TOKEN_LBRACKET = intern("lbracket")
+TOKEN_LPAREN = intern("lparen")
+TOKEN_LT = intern("lt")
+TOKEN_LTEQ = intern("lteq")
+TOKEN_MOD = intern("mod")
+TOKEN_MUL = intern("mul")
+TOKEN_NE = intern("ne")
+TOKEN_PIPE = intern("pipe")
+TOKEN_POW = intern("pow")
+TOKEN_RBRACE = intern("rbrace")
+TOKEN_RBRACKET = intern("rbracket")
+TOKEN_RPAREN = intern("rparen")
+TOKEN_SEMICOLON = intern("semicolon")
+TOKEN_SUB = intern("sub")
+TOKEN_TILDE = intern("tilde")
+TOKEN_WHITESPACE = intern("whitespace")
+TOKEN_FLOAT = intern("float")
+TOKEN_INTEGER = intern("integer")
+TOKEN_NAME = intern("name")
+TOKEN_STRING = intern("string")
+TOKEN_OPERATOR = intern("operator")
+TOKEN_BLOCK_BEGIN = intern("block_begin")
+TOKEN_BLOCK_END = intern("block_end")
+TOKEN_VARIABLE_BEGIN = intern("variable_begin")
+TOKEN_VARIABLE_END = intern("variable_end")
+TOKEN_RAW_BEGIN = intern("raw_begin")
+TOKEN_RAW_END = intern("raw_end")
+TOKEN_COMMENT_BEGIN = intern("comment_begin")
+TOKEN_COMMENT_END = intern("comment_end")
+TOKEN_COMMENT = intern("comment")
+TOKEN_LINESTATEMENT_BEGIN = intern("linestatement_begin")
+TOKEN_LINESTATEMENT_END = intern("linestatement_end")
+TOKEN_LINECOMMENT_BEGIN = intern("linecomment_begin")
+TOKEN_LINECOMMENT_END = intern("linecomment_end")
+TOKEN_LINECOMMENT = intern("linecomment")
+TOKEN_DATA = intern("data")
+TOKEN_INITIAL = intern("initial")
+TOKEN_EOF = intern("eof")
+
+# bind operators to token types
+operators = {
+    "+": TOKEN_ADD,
+    "-": TOKEN_SUB,
+    "/": TOKEN_DIV,
+    "//": TOKEN_FLOORDIV,
+    "*": TOKEN_MUL,
+    "%": TOKEN_MOD,
+    "**": TOKEN_POW,
+    "~": TOKEN_TILDE,
+    "[": TOKEN_LBRACKET,
+    "]": TOKEN_RBRACKET,
+    "(": TOKEN_LPAREN,
+    ")": TOKEN_RPAREN,
+    "{": TOKEN_LBRACE,
+    "}": TOKEN_RBRACE,
+    "==": TOKEN_EQ,
+    "!=": TOKEN_NE,
+    ">": TOKEN_GT,
+    ">=": TOKEN_GTEQ,
+    "<": TOKEN_LT,
+    "<=": TOKEN_LTEQ,
+    "=": TOKEN_ASSIGN,
+    ".": TOKEN_DOT,
+    ":": TOKEN_COLON,
+    "|": TOKEN_PIPE,
+    ",": TOKEN_COMMA,
+    ";": TOKEN_SEMICOLON,
+}
+
+reverse_operators = {v: k for k, v in operators.items()}
+assert len(operators) == len(reverse_operators), "operators dropped"
+operator_re = re.compile(
+    f"({'|'.join(re.escape(x) for x in sorted(operators, key=lambda x: -len(x)))})"
+)
+
+ignored_tokens = frozenset(
+    [
+        TOKEN_COMMENT_BEGIN,
+        TOKEN_COMMENT,
+        TOKEN_COMMENT_END,
+        TOKEN_WHITESPACE,
+        TOKEN_LINECOMMENT_BEGIN,
+        TOKEN_LINECOMMENT_END,
+        TOKEN_LINECOMMENT,
+    ]
+)
+ignore_if_empty = frozenset(
+    [TOKEN_WHITESPACE, TOKEN_DATA, TOKEN_COMMENT, TOKEN_LINECOMMENT]
+)
+
+
+def _describe_token_type(token_type: str) -> str:
+    if token_type in reverse_operators:
+        return reverse_operators[token_type]
+
+    return {
+        TOKEN_COMMENT_BEGIN: "begin of comment",
+        TOKEN_COMMENT_END: "end of comment",
+        TOKEN_COMMENT: "comment",
+        TOKEN_LINECOMMENT: "comment",
+        TOKEN_BLOCK_BEGIN: "begin of statement block",
+        TOKEN_BLOCK_END: "end of statement block",
+        TOKEN_VARIABLE_BEGIN: "begin of print statement",
+        TOKEN_VARIABLE_END: "end of print statement",
+        TOKEN_LINESTATEMENT_BEGIN: "begin of line statement",
+        TOKEN_LINESTATEMENT_END: "end of line statement",
+        TOKEN_DATA: "template data / text",
+        TOKEN_EOF: "end of template",
+    }.get(token_type, token_type)
+
+
+def describe_token(token: "Token") -> str:
+    """Returns a description of the token."""
+    if token.type == TOKEN_NAME:
+        return token.value
+
+    return _describe_token_type(token.type)
+
+
+def describe_token_expr(expr: str) -> str:
+    """Like `describe_token` but for token expressions."""
+    if ":" in expr:
+        type, value = expr.split(":", 1)
+
+        if type == TOKEN_NAME:
+            return value
+    else:
+        type = expr
+
+    return _describe_token_type(type)
+
+
+def count_newlines(value: str) -> int:
+    """Count the number of newline characters in the string.  This is
+    useful for extensions that filter a stream.
+    """
+    return len(newline_re.findall(value))
+
+
+def compile_rules(environment: "Environment") -> t.List[t.Tuple[str, str]]:
+    """Compiles all the rules from the environment into a list of rules."""
+    e = re.escape
+    rules = [
+        (
+            len(environment.comment_start_string),
+            TOKEN_COMMENT_BEGIN,
+            e(environment.comment_start_string),
+        ),
+        (
+            len(environment.block_start_string),
+            TOKEN_BLOCK_BEGIN,
+            e(environment.block_start_string),
+        ),
+        (
+            len(environment.variable_start_string),
+            TOKEN_VARIABLE_BEGIN,
+            e(environment.variable_start_string),
+        ),
+    ]
+
+    if environment.line_statement_prefix is not None:
+        rules.append(
+            (
+                len(environment.line_statement_prefix),
+                TOKEN_LINESTATEMENT_BEGIN,
+                r"^[ \t\v]*" + e(environment.line_statement_prefix),
+            )
+        )
+    if environment.line_comment_prefix is not None:
+        rules.append(
+            (
+                len(environment.line_comment_prefix),
+                TOKEN_LINECOMMENT_BEGIN,
+                r"(?:^|(?<=\S))[^\S\r\n]*" + e(environment.line_comment_prefix),
+            )
+        )
+
+    return [x[1:] for x in sorted(rules, reverse=True)]
+
+
+class Failure:
+    """Class that raises a `TemplateSyntaxError` if called.
+    Used by the `Lexer` to specify known errors.
+    """
+
+    def __init__(
+        self, message: str, cls: t.Type[TemplateSyntaxError] = TemplateSyntaxError
+    ) -> None:
+        self.message = message
+        self.error_class = cls
+
+    def __call__(self, lineno: int, filename: str) -> "te.NoReturn":
+        raise self.error_class(self.message, lineno, filename)
+
+
+class Token(t.NamedTuple):
+    lineno: int
+    type: str
+    value: str
+
+    def __str__(self) -> str:
+        return describe_token(self)
+
+    def test(self, expr: str) -> bool:
+        """Test a token against a token expression.  This can either be a
+        token type or ``'token_type:token_value'``.  This can only test
+        against string values and types.
+        """
+        # here we do a regular string equality check as test_any is usually
+        # passed an iterable of non-interned strings.
+        if self.type == expr:
+            return True
+
+        if ":" in expr:
+            return expr.split(":", 1) == [self.type, self.value]
+
+        return False
+
+    def test_any(self, *iterable: str) -> bool:
+        """Test against multiple token expressions."""
+        return any(self.test(expr) for expr in iterable)
+
+
+class TokenStreamIterator:
+    """The iterator for tokenstreams.  Iterate over the stream
+    until the eof token is reached.
+    """
+
+    def __init__(self, stream: "TokenStream") -> None:
+        self.stream = stream
+
+    def __iter__(self) -> "TokenStreamIterator":
+        return self
+
+    def __next__(self) -> Token:
+        token = self.stream.current
+
+        if token.type is TOKEN_EOF:
+            self.stream.close()
+            raise StopIteration
+
+        next(self.stream)
+        return token
+
+
+class TokenStream:
+    """A token stream is an iterable that yields :class:`Token`\\s.  The
+    parser however does not iterate over it but calls :meth:`next` to go
+    one token ahead.  The current active token is stored as :attr:`current`.
+    """
+
+    def __init__(
+        self,
+        generator: t.Iterable[Token],
+        name: t.Optional[str],
+        filename: t.Optional[str],
+    ):
+        self._iter = iter(generator)
+        self._pushed: "te.Deque[Token]" = deque()
+        self.name = name
+        self.filename = filename
+        self.closed = False
+        self.current = Token(1, TOKEN_INITIAL, "")
+        next(self)
+
+    def __iter__(self) -> TokenStreamIterator:
+        return TokenStreamIterator(self)
+
+    def __bool__(self) -> bool:
+        return bool(self._pushed) or self.current.type is not TOKEN_EOF
+
+    @property
+    def eos(self) -> bool:
+        """Are we at the end of the stream?"""
+        return not self
+
+    def push(self, token: Token) -> None:
+        """Push a token back to the stream."""
+        self._pushed.append(token)
+
+    def look(self) -> Token:
+        """Look at the next token."""
+        old_token = next(self)
+        result = self.current
+        self.push(result)
+        self.current = old_token
+        return result
+
+    def skip(self, n: int = 1) -> None:
+        """Got n tokens ahead."""
+        for _ in range(n):
+            next(self)
+
+    def next_if(self, expr: str) -> t.Optional[Token]:
+        """Perform the token test and return the token if it matched.
+        Otherwise the return value is `None`.
+        """
+        if self.current.test(expr):
+            return next(self)
+
+        return None
+
+    def skip_if(self, expr: str) -> bool:
+        """Like :meth:`next_if` but only returns `True` or `False`."""
+        return self.next_if(expr) is not None
+
+    def __next__(self) -> Token:
+        """Go one token ahead and return the old one.
+
+        Use the built-in :func:`next` instead of calling this directly.
+        """
+        rv = self.current
+
+        if self._pushed:
+            self.current = self._pushed.popleft()
+        elif self.current.type is not TOKEN_EOF:
+            try:
+                self.current = next(self._iter)
+            except StopIteration:
+                self.close()
+
+        return rv
+
+    def close(self) -> None:
+        """Close the stream."""
+        self.current = Token(self.current.lineno, TOKEN_EOF, "")
+        self._iter = iter(())
+        self.closed = True
+
+    def expect(self, expr: str) -> Token:
+        """Expect a given token type and return it.  This accepts the same
+        argument as :meth:`jinja2.lexer.Token.test`.
+        """
+        if not self.current.test(expr):
+            expr = describe_token_expr(expr)
+
+            if self.current.type is TOKEN_EOF:
+                raise TemplateSyntaxError(
+                    f"unexpected end of template, expected {expr!r}.",
+                    self.current.lineno,
+                    self.name,
+                    self.filename,
+                )
+
+            raise TemplateSyntaxError(
+                f"expected token {expr!r}, got {describe_token(self.current)!r}",
+                self.current.lineno,
+                self.name,
+                self.filename,
+            )
+
+        return next(self)
+
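+# Illustrative sketch (hand-built stream, not part of the upstream module):
+#
+#     stream = TokenStream(iter([Token(1, "name", "foo")]), None, None)
+#     stream.current               # Token(lineno=1, type='name', value='foo')
+#     stream.look()                # peek at the next token without consuming
+#     tok = stream.expect("name")  # consume it, or raise TemplateSyntaxError
+#     stream.eos                   # True once only TOKEN_EOF remains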
+
+def get_lexer(environment: "Environment") -> "Lexer":
+    """Return a lexer which is probably cached."""
+    key = (
+        environment.block_start_string,
+        environment.block_end_string,
+        environment.variable_start_string,
+        environment.variable_end_string,
+        environment.comment_start_string,
+        environment.comment_end_string,
+        environment.line_statement_prefix,
+        environment.line_comment_prefix,
+        environment.trim_blocks,
+        environment.lstrip_blocks,
+        environment.newline_sequence,
+        environment.keep_trailing_newline,
+    )
+    lexer = _lexer_cache.get(key)
+
+    if lexer is None:
+        _lexer_cache[key] = lexer = Lexer(environment)
+
+    return lexer
+
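+# Illustrative sketch (an inference from the cache key above, not upstream
+# documentation): environments with identical lexer-relevant settings share
+# one Lexer instance while the cache entry is alive, e.g.
+#
+#     from jinja2 import Environment
+#     assert get_lexer(Environment()) is get_lexer(Environment())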
+
+class OptionalLStrip(tuple):
+    """A special tuple for marking a point in the state that can have
+    lstrip applied.
+    """
+
+    __slots__ = ()
+
+    # Even though it looks like a no-op, creating instances fails
+    # without this.
+    def __new__(cls, *members, **kwargs):  # type: ignore
+        return super().__new__(cls, members)
+
+
+class _Rule(t.NamedTuple):
+    pattern: t.Pattern[str]
+    tokens: t.Union[str, t.Tuple[str, ...], t.Tuple[Failure]]
+    command: t.Optional[str]
+
+
+class Lexer:
+    """Class that implements a lexer for a given environment. It is created
+    automatically by the environment class; you usually don't have to create
+    one yourself.
+
+    Note that the lexer is not automatically bound to an environment.
+    Multiple environments can share the same lexer.
+    """
+
+    def __init__(self, environment: "Environment") -> None:
+        # shortcuts
+        e = re.escape
+
+        def c(x: str) -> t.Pattern[str]:
+            return re.compile(x, re.M | re.S)
+
+        # lexing rules for tags
+        tag_rules: t.List[_Rule] = [
+            _Rule(whitespace_re, TOKEN_WHITESPACE, None),
+            _Rule(float_re, TOKEN_FLOAT, None),
+            _Rule(integer_re, TOKEN_INTEGER, None),
+            _Rule(name_re, TOKEN_NAME, None),
+            _Rule(string_re, TOKEN_STRING, None),
+            _Rule(operator_re, TOKEN_OPERATOR, None),
+        ]
+
+        # assemble the root lexing rule. because "|" tries alternatives
+        # from left to right, we have to sort by length so that the lexer
+        # continues working as expected when we have parsing rules like
+        # <% for blocks and <%= for variables. (if someone wants asp-like
+        # syntax) variables are just part of the rules if variable
+        # processing is required.
+        root_tag_rules = compile_rules(environment)
+
+        block_start_re = e(environment.block_start_string)
+        block_end_re = e(environment.block_end_string)
+        comment_end_re = e(environment.comment_end_string)
+        variable_end_re = e(environment.variable_end_string)
+
+        # block suffix if trimming is enabled
+        block_suffix_re = "\\n?" if environment.trim_blocks else ""
+
+        self.lstrip_blocks = environment.lstrip_blocks
+
+        self.newline_sequence = environment.newline_sequence
+        self.keep_trailing_newline = environment.keep_trailing_newline
+
+        root_raw_re = (
+            rf"(?P<raw_begin>{block_start_re}(\-|\+|)\s*raw\s*"
+            rf"(?:\-{block_end_re}\s*|{block_end_re}))"
+        )
+        root_parts_re = "|".join(
+            [root_raw_re] + [rf"(?P<{n}>{r}(\-|\+|))" for n, r in root_tag_rules]
+        )
+
+        # global lexing rules
+        self.rules: t.Dict[str, t.List[_Rule]] = {
+            "root": [
+                # directives
+                _Rule(
+                    c(rf"(.*?)(?:{root_parts_re})"),
+                    OptionalLStrip(TOKEN_DATA, "#bygroup"),  # type: ignore
+                    "#bygroup",
+                ),
+                # data
+                _Rule(c(".+"), TOKEN_DATA, None),
+            ],
+            # comments
+            TOKEN_COMMENT_BEGIN: [
+                _Rule(
+                    c(
+                        rf"(.*?)((?:\+{comment_end_re}|\-{comment_end_re}\s*"
+                        rf"|{comment_end_re}{block_suffix_re}))"
+                    ),
+                    (TOKEN_COMMENT, TOKEN_COMMENT_END),
+                    "#pop",
+                ),
+                _Rule(c(r"(.)"), (Failure("Missing end of comment tag"),), None),
+            ],
+            # blocks
+            TOKEN_BLOCK_BEGIN: [
+                _Rule(
+                    c(
+                        rf"(?:\+{block_end_re}|\-{block_end_re}\s*"
+                        rf"|{block_end_re}{block_suffix_re})"
+                    ),
+                    TOKEN_BLOCK_END,
+                    "#pop",
+                ),
+            ]
+            + tag_rules,
+            # variables
+            TOKEN_VARIABLE_BEGIN: [
+                _Rule(
+                    c(rf"\-{variable_end_re}\s*|{variable_end_re}"),
+                    TOKEN_VARIABLE_END,
+                    "#pop",
+                )
+            ]
+            + tag_rules,
+            # raw block
+            TOKEN_RAW_BEGIN: [
+                _Rule(
+                    c(
+                        rf"(.*?)((?:{block_start_re}(\-|\+|))\s*endraw\s*"
+                        rf"(?:\+{block_end_re}|\-{block_end_re}\s*"
+                        rf"|{block_end_re}{block_suffix_re}))"
+                    ),
+                    OptionalLStrip(TOKEN_DATA, TOKEN_RAW_END),  # type: ignore
+                    "#pop",
+                ),
+                _Rule(c(r"(.)"), (Failure("Missing end of raw directive"),), None),
+            ],
+            # line statements
+            TOKEN_LINESTATEMENT_BEGIN: [
+                _Rule(c(r"\s*(\n|$)"), TOKEN_LINESTATEMENT_END, "#pop")
+            ]
+            + tag_rules,
+            # line comments
+            TOKEN_LINECOMMENT_BEGIN: [
+                _Rule(
+                    c(r"(.*?)()(?=\n|$)"),
+                    (TOKEN_LINECOMMENT, TOKEN_LINECOMMENT_END),
+                    "#pop",
+                )
+            ],
+        }
+
+    def _normalize_newlines(self, value: str) -> str:
+        """Replace all newlines with the configured sequence in strings
+        and template data.
+        """
+        return newline_re.sub(self.newline_sequence, value)
+
+    def tokenize(
+        self,
+        source: str,
+        name: t.Optional[str] = None,
+        filename: t.Optional[str] = None,
+        state: t.Optional[str] = None,
+    ) -> TokenStream:
+        """Calls :meth:`tokeniter` and :meth:`wrap` and wraps the result
+        in a :class:`TokenStream`."""
+        stream = self.tokeniter(source, name, filename, state)
+        return TokenStream(self.wrap(stream, name, filename), name, filename)
+
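+    # Illustrative sketch: tokenizing through an environment's lexer yields
+    # a TokenStream of Token(lineno, type, value) triples, e.g.
+    #
+    #     from jinja2 import Environment
+    #     stream = Environment().lexer.tokenize("Hello {{ name }}!")
+    #     [tok.type for tok in stream]
+    #     # ['data', 'variable_begin', 'name', 'variable_end', 'data']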
+    def wrap(
+        self,
+        stream: t.Iterable[t.Tuple[int, str, str]],
+        name: t.Optional[str] = None,
+        filename: t.Optional[str] = None,
+    ) -> t.Iterator[Token]:
+        """This is called with the stream as returned by `tokeniter` and wraps
+        every token in a :class:`Token` and converts the value.
+        """
+        for lineno, token, value_str in stream:
+            if token in ignored_tokens:
+                continue
+
+            value: t.Any = value_str
+
+            if token == TOKEN_LINESTATEMENT_BEGIN:
+                token = TOKEN_BLOCK_BEGIN
+            elif token == TOKEN_LINESTATEMENT_END:
+                token = TOKEN_BLOCK_END
+            # we are not interested in those tokens in the parser
+            elif token in (TOKEN_RAW_BEGIN, TOKEN_RAW_END):
+                continue
+            elif token == TOKEN_DATA:
+                value = self._normalize_newlines(value_str)
+            elif token == "keyword":
+                token = value_str
+            elif token == TOKEN_NAME:
+                value = value_str
+
+                if not value.isidentifier():
+                    raise TemplateSyntaxError(
+                        "Invalid character in identifier", lineno, name, filename
+                    )
+            elif token == TOKEN_STRING:
+                # try to unescape string
+                try:
+                    value = (
+                        self._normalize_newlines(value_str[1:-1])
+                        .encode("ascii", "backslashreplace")
+                        .decode("unicode-escape")
+                    )
+                except Exception as e:
+                    msg = str(e).split(":")[-1].strip()
+                    raise TemplateSyntaxError(msg, lineno, name, filename) from e
+            elif token == TOKEN_INTEGER:
+                value = int(value_str.replace("_", ""), 0)
+            elif token == TOKEN_FLOAT:
+                # remove all "_" first to support more Python versions
+                value = literal_eval(value_str.replace("_", ""))
+            elif token == TOKEN_OPERATOR:
+                token = operators[value_str]
+
+            yield Token(lineno, token, value)
+
+    def tokeniter(
+        self,
+        source: str,
+        name: t.Optional[str],
+        filename: t.Optional[str] = None,
+        state: t.Optional[str] = None,
+    ) -> t.Iterator[t.Tuple[int, str, str]]:
+        """This method tokenizes the text and returns the tokens in a
+        generator. Use this method if you just want to tokenize a template.
+
+        .. versionchanged:: 3.0
+            Only ``\\n``, ``\\r\\n`` and ``\\r`` are treated as line
+            breaks.
+        """
+        lines = newline_re.split(source)[::2]
+
+        if not self.keep_trailing_newline and lines[-1] == "":
+            del lines[-1]
+
+        source = "\n".join(lines)
+        pos = 0
+        lineno = 1
+        stack = ["root"]
+
+        if state is not None and state != "root":
+            assert state in ("variable", "block"), "invalid state"
+            stack.append(state + "_begin")
+
+        statetokens = self.rules[stack[-1]]
+        source_length = len(source)
+        balancing_stack: t.List[str] = []
+        newlines_stripped = 0
+        line_starting = True
+
+        while True:
+            # tokenizer loop
+            for regex, tokens, new_state in statetokens:
+                m = regex.match(source, pos)
+
+                # if no match we try again with the next rule
+                if m is None:
+                    continue
+
+                # we only match blocks and variables if braces / parentheses
+                # are balanced. continue parsing with the lower rule which
+                # is the operator rule. do this only if the end tags look
+                # like operators
+                if balancing_stack and tokens in (
+                    TOKEN_VARIABLE_END,
+                    TOKEN_BLOCK_END,
+                    TOKEN_LINESTATEMENT_END,
+                ):
+                    continue
+
+                # tuples support more options
+                if isinstance(tokens, tuple):
+                    groups: t.Sequence[str] = m.groups()
+
+                    if isinstance(tokens, OptionalLStrip):
+                        # Rule supports lstrip. Match will look like
+                        # text, block type, whitespace control, type, control, ...
+                        text = groups[0]
+                        # Skipping the text and first type, every other group is the
+                        # whitespace control for each type. One of the groups will be
+                        # -, +, or empty string instead of None.
+                        strip_sign = next(g for g in groups[2::2] if g is not None)
+
+                        if strip_sign == "-":
+                            # Strip all whitespace between the text and the tag.
+                            stripped = text.rstrip()
+                            newlines_stripped = text[len(stripped) :].count("\n")
+                            groups = [stripped, *groups[1:]]
+                        elif (
+                            # Not marked for preserving whitespace.
+                            strip_sign != "+"
+                            # lstrip is enabled.
+                            and self.lstrip_blocks
+                            # Not a variable expression.
+                            and not m.groupdict().get(TOKEN_VARIABLE_BEGIN)
+                        ):
+                            # The start of text between the last newline and the tag.
+                            l_pos = text.rfind("\n") + 1
+
+                            if l_pos > 0 or line_starting:
+                                # If there's only whitespace between the newline and the
+                                # tag, strip it.
+                                if whitespace_re.fullmatch(text, l_pos):
+                                    groups = [text[:l_pos], *groups[1:]]
+
+                    for idx, token in enumerate(tokens):
+                        # failure group
+                        if token.__class__ is Failure:
+                            raise token(lineno, filename)
+                        # bygroup is a bit more complex: in that case we
+                        # yield, for the current token, the first named
+                        # group that matched
+                        elif token == "#bygroup":
+                            for key, value in m.groupdict().items():
+                                if value is not None:
+                                    yield lineno, key, value
+                                    lineno += value.count("\n")
+                                    break
+                            else:
+                                raise RuntimeError(
+                                    f"{regex!r} wanted to resolve the token dynamically"
+                                    " but no group matched"
+                                )
+                        # normal group
+                        else:
+                            data = groups[idx]
+
+                            if data or token not in ignore_if_empty:
+                                yield lineno, token, data
+
+                            lineno += data.count("\n") + newlines_stripped
+                            newlines_stripped = 0
+
+                # plain string tokens are yielded as-is.
+                else:
+                    data = m.group()
+
+                    # update brace/parentheses balance
+                    if tokens == TOKEN_OPERATOR:
+                        if data == "{":
+                            balancing_stack.append("}")
+                        elif data == "(":
+                            balancing_stack.append(")")
+                        elif data == "[":
+                            balancing_stack.append("]")
+                        elif data in ("}", ")", "]"):
+                            if not balancing_stack:
+                                raise TemplateSyntaxError(
+                                    f"unexpected '{data}'", lineno, name, filename
+                                )
+
+                            expected_op = balancing_stack.pop()
+
+                            if expected_op != data:
+                                raise TemplateSyntaxError(
+                                    f"unexpected '{data}', expected '{expected_op}'",
+                                    lineno,
+                                    name,
+                                    filename,
+                                )
+
+                    # yield items
+                    if data or tokens not in ignore_if_empty:
+                        yield lineno, tokens, data
+
+                    lineno += data.count("\n")
+
+                line_starting = m.group()[-1:] == "\n"
+                # fetch the new position into a new variable so that we can
+                # check if there is an internal parsing error which would
+                # result in an infinite loop
+                pos2 = m.end()
+
+                # handle state changes
+                if new_state is not None:
+                    # remove the uppermost state
+                    if new_state == "#pop":
+                        stack.pop()
+                    # resolve the new state by group checking
+                    elif new_state == "#bygroup":
+                        for key, value in m.groupdict().items():
+                            if value is not None:
+                                stack.append(key)
+                                break
+                        else:
+                            raise RuntimeError(
+                                f"{regex!r} wanted to resolve the new state dynamically"
+                                f" but no group matched"
+                            )
+                    # direct state name given
+                    else:
+                        stack.append(new_state)
+
+                    statetokens = self.rules[stack[-1]]
+                # we are still at the same position and no stack change.
+                # this means a loop without break condition, avoid that and
+                # raise error
+                elif pos2 == pos:
+                    raise RuntimeError(
+                        f"{regex!r} yielded empty string without stack change"
+                    )
+
+                # publish the new position and start again
+                pos = pos2
+                break
+            # if the loop terminated without break we haven't found a single
+            # match; either we are at the end of the file or we have a problem
+            else:
+                # end of text
+                if pos >= source_length:
+                    return
+
+                # something went wrong
+                raise TemplateSyntaxError(
+                    f"unexpected char {source[pos]!r} at {pos}", lineno, name, filename
+                )
diff --git a/venv/lib/python3.9/site-packages/jinja2/loaders.py b/venv/lib/python3.9/site-packages/jinja2/loaders.py
new file mode 100644
index 0000000..d2f9809
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/loaders.py
@@ -0,0 +1,661 @@
+"""API and implementations for loading templates from different data
+sources.
+"""
+import importlib.util
+import os
+import posixpath
+import sys
+import typing as t
+import weakref
+import zipimport
+from collections import abc
+from hashlib import sha1
+from importlib import import_module
+from types import ModuleType
+
+from .exceptions import TemplateNotFound
+from .utils import internalcode
+from .utils import open_if_exists
+
+if t.TYPE_CHECKING:
+    from .environment import Environment
+    from .environment import Template
+
+
+def split_template_path(template: str) -> t.List[str]:
+    """Split a path into segments and perform a sanity check.  If it detects
+    '..' in the path it will raise a `TemplateNotFound` error.
+    """
+    pieces = []
+    for piece in template.split("/"):
+        if (
+            os.path.sep in piece
+            or (os.path.altsep and os.path.altsep in piece)
+            or piece == os.path.pardir
+        ):
+            raise TemplateNotFound(template)
+        elif piece and piece != ".":
+            pieces.append(piece)
+    return pieces
+
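+# Illustrative sketch (derived from the code above):
+#
+#     split_template_path("./pages/index.html")  # -> ["pages", "index.html"]
+#     split_template_path("pages/../secret")     # raises TemplateNotFound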
+
+class BaseLoader:
+    """Base class for all loaders.  Subclass this and override `get_source` to
+    implement a custom loading mechanism.  The environment provides a
+    `get_template` method that calls the loader's `load` method to get the
+    :class:`Template` object.
+
+    A very basic example for a loader that looks up templates on the file
+    system could look like this::
+
+        from jinja2 import BaseLoader, TemplateNotFound
+        from os.path import join, exists, getmtime
+
+        class MyLoader(BaseLoader):
+
+            def __init__(self, path):
+                self.path = path
+
+            def get_source(self, environment, template):
+                path = join(self.path, template)
+                if not exists(path):
+                    raise TemplateNotFound(template)
+                mtime = getmtime(path)
+                with open(path) as f:
+                    source = f.read()
+                return source, path, lambda: mtime == getmtime(path)
+    """
+
+    #: if set to `False` it indicates that the loader cannot provide access
+    #: to the source of templates.
+    #:
+    #: .. versionadded:: 2.4
+    has_source_access = True
+
+    def get_source(
+        self, environment: "Environment", template: str
+    ) -> t.Tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]]:
+        """Get the template source, filename and reload helper for a template.
+        It's passed the environment and template name and has to return a
+        tuple in the form ``(source, filename, uptodate)`` or raise a
+        `TemplateNotFound` error if it can't locate the template.
+
+        The source part of the returned tuple must be the source of the
+        template as a string. The filename should be the name of the
+        file on the filesystem if it was loaded from there, otherwise
+        ``None``. The filename is used by Python for the tracebacks
+        if no loader extension is used.
+
+        The last item in the tuple is the `uptodate` function.  If auto
+        reloading is enabled it's always called to check if the template
+        changed.  No arguments are passed so the function must store the
+        old state somewhere (for example in a closure).  If it returns `False`
+        the template will be reloaded.
+        """
+        if not self.has_source_access:
+            raise RuntimeError(
+                f"{type(self).__name__} cannot provide access to the source"
+            )
+        raise TemplateNotFound(template)
+
+    def list_templates(self) -> t.List[str]:
+        """Iterates over all templates.  If the loader does not support that
+        it should raise a :exc:`TypeError` which is the default behavior.
+        """
+        raise TypeError("this loader cannot iterate over all templates")
+
+    @internalcode
+    def load(
+        self,
+        environment: "Environment",
+        name: str,
+        globals: t.Optional[t.MutableMapping[str, t.Any]] = None,
+    ) -> "Template":
+        """Loads a template.  This method looks up the template in the cache
+        or loads one by calling :meth:`get_source`.  Subclasses should not
+        override this method as loaders working on collections of other
+        loaders (such as :class:`PrefixLoader` or :class:`ChoiceLoader`)
+        will not call this method but `get_source` directly.
+        """
+        code = None
+        if globals is None:
+            globals = {}
+
+        # first we try to get the source for this template together
+        # with the filename and the uptodate function.
+        source, filename, uptodate = self.get_source(environment, name)
+
+        # try to load the code from the bytecode cache if there is a
+        # bytecode cache configured.
+        bcc = environment.bytecode_cache
+        if bcc is not None:
+            bucket = bcc.get_bucket(environment, name, filename, source)
+            code = bucket.code
+
+        # if we don't have code so far (not cached, or no longer up to
+        # date), we compile the template
+        if code is None:
+            code = environment.compile(source, name, filename)
+
+        # if the bytecode cache is available and the bucket doesn't
+        # have code yet, we give the bucket the new code and put it
+        # back into the bytecode cache.
+        if bcc is not None and bucket.code is None:
+            bucket.code = code
+            bcc.set_bucket(bucket)
+
+        return environment.template_class.from_code(
+            environment, code, globals, uptodate
+        )
+
+
+class FileSystemLoader(BaseLoader):
+    """Load templates from a directory in the file system.
+
+    The path can be relative or absolute. Relative paths are relative to
+    the current working directory.
+
+    .. code-block:: python
+
+        loader = FileSystemLoader("templates")
+
+    A list of paths can be given. The directories will be searched in
+    order, stopping at the first matching template.
+
+    .. code-block:: python
+
+        loader = FileSystemLoader(["/override/templates", "/default/templates"])
+
+    :param searchpath: A path, or list of paths, to the directory that
+        contains the templates.
+    :param encoding: Use this encoding to read the text from template
+        files.
+    :param followlinks: Follow symbolic links in the path.
+
+    .. versionchanged:: 2.8
+        Added the ``followlinks`` parameter.
+    """
+
+    def __init__(
+        self,
+        searchpath: t.Union[str, os.PathLike, t.Sequence[t.Union[str, os.PathLike]]],
+        encoding: str = "utf-8",
+        followlinks: bool = False,
+    ) -> None:
+        if not isinstance(searchpath, abc.Iterable) or isinstance(searchpath, str):
+            searchpath = [searchpath]
+
+        self.searchpath = [os.fspath(p) for p in searchpath]
+        self.encoding = encoding
+        self.followlinks = followlinks
+
+    def get_source(
+        self, environment: "Environment", template: str
+    ) -> t.Tuple[str, str, t.Callable[[], bool]]:
+        pieces = split_template_path(template)
+        for searchpath in self.searchpath:
+            # Use posixpath even on Windows to avoid "drive:" or UNC
+            # segments breaking out of the search directory.
+            filename = posixpath.join(searchpath, *pieces)
+            f = open_if_exists(filename)
+            if f is None:
+                continue
+            try:
+                contents = f.read().decode(self.encoding)
+            finally:
+                f.close()
+
+            mtime = os.path.getmtime(filename)
+
+            def uptodate() -> bool:
+                try:
+                    return os.path.getmtime(filename) == mtime
+                except OSError:
+                    return False
+
+            # Use normpath to convert Windows altsep to sep.
+            return contents, os.path.normpath(filename), uptodate
+        raise TemplateNotFound(template)
+
+    def list_templates(self) -> t.List[str]:
+        found = set()
+        for searchpath in self.searchpath:
+            walk_dir = os.walk(searchpath, followlinks=self.followlinks)
+            for dirpath, _, filenames in walk_dir:
+                for filename in filenames:
+                    template = (
+                        os.path.join(dirpath, filename)[len(searchpath) :]
+                        .strip(os.path.sep)
+                        .replace(os.path.sep, "/")
+                    )
+                    if template[:2] == "./":
+                        template = template[2:]
+                    if template not in found:
+                        found.add(template)
+        return sorted(found)
+
+
+class PackageLoader(BaseLoader):
+    """Load templates from a directory in a Python package.
+
+    :param package_name: Import name of the package that contains the
+        template directory.
+    :param package_path: Directory within the imported package that
+        contains the templates.
+    :param encoding: Encoding of template files.
+
+    The following example looks up templates in the ``pages`` directory
+    within the ``project.ui`` package.
+
+    .. code-block:: python
+
+        loader = PackageLoader("project.ui", "pages")
+
+    Only packages installed as directories (standard pip behavior) or
+    zip/egg files (less common) are supported. The Python API for
+    introspecting data in packages is too limited to support other
+    installation methods the way this loader requires.
+
+    There is limited support for :pep:`420` namespace packages. The
+    template directory is assumed to only be in one namespace
+    contributor. Zip files contributing to a namespace are not
+    supported.
+
+    .. versionchanged:: 3.0
+        No longer uses ``setuptools`` as a dependency.
+
+    .. versionchanged:: 3.0
+        Limited PEP 420 namespace package support.
+    """
+
+    def __init__(
+        self,
+        package_name: str,
+        package_path: "str" = "templates",
+        encoding: str = "utf-8",
+    ) -> None:
+        package_path = os.path.normpath(package_path).rstrip(os.path.sep)
+
+        # normpath preserves ".", which isn't valid in zip paths.
+        if package_path == os.path.curdir:
+            package_path = ""
+        elif package_path[:2] == os.path.curdir + os.path.sep:
+            package_path = package_path[2:]
+
+        self.package_path = package_path
+        self.package_name = package_name
+        self.encoding = encoding
+
+        # Make sure the package exists. This also makes namespace
+        # packages work, otherwise get_loader returns None.
+        import_module(package_name)
+        spec = importlib.util.find_spec(package_name)
+        assert spec is not None, "An import spec was not found for the package."
+        loader = spec.loader
+        assert loader is not None, "A loader was not found for the package."
+        self._loader = loader
+        self._archive = None
+        template_root = None
+
+        if isinstance(loader, zipimport.zipimporter):
+            self._archive = loader.archive
+            pkgdir = next(iter(spec.submodule_search_locations))  # type: ignore
+            template_root = os.path.join(pkgdir, package_path).rstrip(os.path.sep)
+        else:
+            roots: t.List[str] = []
+
+            # One element for regular packages, multiple for namespace
+            # packages, or None for single module file.
+            if spec.submodule_search_locations:
+                roots.extend(spec.submodule_search_locations)
+            # A single module file, use the parent directory instead.
+            elif spec.origin is not None:
+                roots.append(os.path.dirname(spec.origin))
+
+            for root in roots:
+                root = os.path.join(root, package_path)
+
+                if os.path.isdir(root):
+                    template_root = root
+                    break
+
+        if template_root is None:
+            raise ValueError(
+                f"The {package_name!r} package was not installed in a"
+                " way that PackageLoader understands."
+            )
+
+        self._template_root = template_root
+
+    def get_source(
+        self, environment: "Environment", template: str
+    ) -> t.Tuple[str, str, t.Optional[t.Callable[[], bool]]]:
+        # Use posixpath even on Windows to avoid "drive:" or UNC
+        # segments breaking out of the search directory. Use normpath to
+        # convert Windows altsep to sep.
+        p = os.path.normpath(
+            posixpath.join(self._template_root, *split_template_path(template))
+        )
+        up_to_date: t.Optional[t.Callable[[], bool]]
+
+        if self._archive is None:
+            # Package is a directory.
+            if not os.path.isfile(p):
+                raise TemplateNotFound(template)
+
+            with open(p, "rb") as f:
+                source = f.read()
+
+            mtime = os.path.getmtime(p)
+
+            def up_to_date() -> bool:
+                return os.path.isfile(p) and os.path.getmtime(p) == mtime
+
+        else:
+            # Package is a zip file.
+            try:
+                source = self._loader.get_data(p)  # type: ignore
+            except OSError as e:
+                raise TemplateNotFound(template) from e
+
+            # Could use the zip's mtime for all template mtimes, but
+            # would need to safely reload the module if it's out of
+            # date, so just report it as always current.
+            up_to_date = None
+
+        return source.decode(self.encoding), p, up_to_date
+
+    def list_templates(self) -> t.List[str]:
+        results: t.List[str] = []
+
+        if self._archive is None:
+            # Package is a directory.
+            offset = len(self._template_root)
+
+            for dirpath, _, filenames in os.walk(self._template_root):
+                dirpath = dirpath[offset:].lstrip(os.path.sep)
+                results.extend(
+                    os.path.join(dirpath, name).replace(os.path.sep, "/")
+                    for name in filenames
+                )
+        else:
+            if not hasattr(self._loader, "_files"):
+                raise TypeError(
+                    "This zip import does not have the required"
+                    " metadata to list templates."
+                )
+
+            # Package is a zip file.
+            prefix = (
+                self._template_root[len(self._archive) :].lstrip(os.path.sep)
+                + os.path.sep
+            )
+            offset = len(prefix)
+
+            for name in self._loader._files.keys():  # type: ignore
+                # Find names under the templates directory that aren't directories.
+                if name.startswith(prefix) and name[-1] != os.path.sep:
+                    results.append(name[offset:].replace(os.path.sep, "/"))
+
+        results.sort()
+        return results
+
+
+class DictLoader(BaseLoader):
+    """Loads a template from a Python dict mapping template names to
+    template source.  This loader is useful for unit testing:
+
+    >>> loader = DictLoader({'index.html': 'source here'})
+
+    Because auto reloading is rarely useful, it is disabled by default.
+    """
+
+    def __init__(self, mapping: t.Mapping[str, str]) -> None:
+        self.mapping = mapping
+
+    def get_source(
+        self, environment: "Environment", template: str
+    ) -> t.Tuple[str, None, t.Callable[[], bool]]:
+        if template in self.mapping:
+            source = self.mapping[template]
+            return source, None, lambda: source == self.mapping.get(template)
+        raise TemplateNotFound(template)
+
+    def list_templates(self) -> t.List[str]:
+        return sorted(self.mapping)
+
+
+class FunctionLoader(BaseLoader):
+    """A loader that is passed a function which does the loading.  The
+    function receives the name of the template and has to return either
+    a string with the template source, a tuple in the form ``(source,
+    filename, uptodatefunc)`` or `None` if the template does not exist.
+
+    >>> def load_template(name):
+    ...     if name == 'index.html':
+    ...         return '...'
+    ...
+    >>> loader = FunctionLoader(load_template)
+
+    The `uptodatefunc` is a function that is called if autoreload is enabled
+    and has to return `True` if the template is still up to date.  For more
+    details have a look at :meth:`BaseLoader.get_source` which has the same
+    return value.
+    """
+
+    def __init__(
+        self,
+        load_func: t.Callable[
+            [str],
+            t.Optional[
+                t.Union[
+                    str, t.Tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]]
+                ]
+            ],
+        ],
+    ) -> None:
+        self.load_func = load_func
+
+    def get_source(
+        self, environment: "Environment", template: str
+    ) -> t.Tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]]:
+        rv = self.load_func(template)
+
+        if rv is None:
+            raise TemplateNotFound(template)
+
+        if isinstance(rv, str):
+            return rv, None, None
+
+        return rv
+
+
+class PrefixLoader(BaseLoader):
+    """A loader that is passed a dict of loaders where each loader is bound
+    to a prefix.  The prefix is delimited from the template by a slash by
+    default; this can be changed by setting the `delimiter` argument to
+    something else::
+
+        loader = PrefixLoader({
+            'app1':     PackageLoader('mypackage.app1'),
+            'app2':     PackageLoader('mypackage.app2')
+        })
+
+    Loading ``'app1/index.html'`` retrieves the file from the app1 package;
+    loading ``'app2/index.html'`` retrieves it from the second.
+    """
+
+    def __init__(
+        self, mapping: t.Mapping[str, BaseLoader], delimiter: str = "/"
+    ) -> None:
+        self.mapping = mapping
+        self.delimiter = delimiter
+
+    def get_loader(self, template: str) -> t.Tuple[BaseLoader, str]:
+        try:
+            prefix, name = template.split(self.delimiter, 1)
+            loader = self.mapping[prefix]
+        except (ValueError, KeyError) as e:
+            raise TemplateNotFound(template) from e
+        return loader, name
+
+    def get_source(
+        self, environment: "Environment", template: str
+    ) -> t.Tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]]:
+        loader, name = self.get_loader(template)
+        try:
+            return loader.get_source(environment, name)
+        except TemplateNotFound as e:
+            # re-raise the exception with the correct filename here.
+            # (the one that includes the prefix)
+            raise TemplateNotFound(template) from e
+
+    @internalcode
+    def load(
+        self,
+        environment: "Environment",
+        name: str,
+        globals: t.Optional[t.MutableMapping[str, t.Any]] = None,
+    ) -> "Template":
+        loader, local_name = self.get_loader(name)
+        try:
+            return loader.load(environment, local_name, globals)
+        except TemplateNotFound as e:
+            # re-raise the exception with the correct filename here.
+            # (the one that includes the prefix)
+            raise TemplateNotFound(name) from e
+
+    def list_templates(self) -> t.List[str]:
+        result = []
+        for prefix, loader in self.mapping.items():
+            for template in loader.list_templates():
+                result.append(prefix + self.delimiter + template)
+        return result
+
+
+class ChoiceLoader(BaseLoader):
+    """This loader works like the `PrefixLoader`, except that no prefix is
+    specified.  If a template cannot be found by one loader, the next one
+    is tried.
+
+    >>> loader = ChoiceLoader([
+    ...     FileSystemLoader('/path/to/user/templates'),
+    ...     FileSystemLoader('/path/to/system/templates')
+    ... ])
+
+    This is useful if you want to allow users to override builtin templates
+    from a different location.
+    """
+
+    def __init__(self, loaders: t.Sequence[BaseLoader]) -> None:
+        self.loaders = loaders
+
+    def get_source(
+        self, environment: "Environment", template: str
+    ) -> t.Tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]]:
+        for loader in self.loaders:
+            try:
+                return loader.get_source(environment, template)
+            except TemplateNotFound:
+                pass
+        raise TemplateNotFound(template)
+
+    @internalcode
+    def load(
+        self,
+        environment: "Environment",
+        name: str,
+        globals: t.Optional[t.MutableMapping[str, t.Any]] = None,
+    ) -> "Template":
+        for loader in self.loaders:
+            try:
+                return loader.load(environment, name, globals)
+            except TemplateNotFound:
+                pass
+        raise TemplateNotFound(name)
+
+    def list_templates(self) -> t.List[str]:
+        found = set()
+        for loader in self.loaders:
+            found.update(loader.list_templates())
+        return sorted(found)
+
+
+class _TemplateModule(ModuleType):
+    """Like a normal module but with support for weak references"""
+
+
+class ModuleLoader(BaseLoader):
+    """This loader loads templates from precompiled template modules.
+
+    Example usage:
+
+    >>> loader = ChoiceLoader([
+    ...     ModuleLoader('/path/to/compiled/templates'),
+    ...     FileSystemLoader('/path/to/templates')
+    ... ])
+
+    Templates can be precompiled with :meth:`Environment.compile_templates`.
+    """
+
+    has_source_access = False
+
+    def __init__(
+        self, path: t.Union[str, os.PathLike, t.Sequence[t.Union[str, os.PathLike]]]
+    ) -> None:
+        package_name = f"_jinja2_module_templates_{id(self):x}"
+
+        # create a fake module that looks for the templates in the
+        # path given.
+        mod = _TemplateModule(package_name)
+
+        if not isinstance(path, abc.Iterable) or isinstance(path, str):
+            path = [path]
+
+        mod.__path__ = [os.fspath(p) for p in path]
+
+        sys.modules[package_name] = weakref.proxy(
+            mod, lambda x: sys.modules.pop(package_name, None)
+        )
+
+        # the only strong reference, the sys.modules entry is weak
+        # so that the garbage collector can remove it once the
+        # loader that created it goes out of business.
+        self.module = mod
+        self.package_name = package_name
+
+    @staticmethod
+    def get_template_key(name: str) -> str:
+        return "tmpl_" + sha1(name.encode("utf-8")).hexdigest()
+
+    @staticmethod
+    def get_module_filename(name: str) -> str:
+        return ModuleLoader.get_template_key(name) + ".py"
+
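+    # Illustrative sketch: template names map to stable module file names
+    # via sha1, e.g.
+    #
+    #     key = ModuleLoader.get_template_key("index.html")
+    #     key.startswith("tmpl_") and len(key) == 5 + 40  # True (40 hex digits)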
+    @internalcode
+    def load(
+        self,
+        environment: "Environment",
+        name: str,
+        globals: t.Optional[t.MutableMapping[str, t.Any]] = None,
+    ) -> "Template":
+        key = self.get_template_key(name)
+        module = f"{self.package_name}.{key}"
+        mod = getattr(self.module, module, None)
+
+        if mod is None:
+            try:
+                mod = __import__(module, None, None, ["root"])
+            except ImportError as e:
+                raise TemplateNotFound(name) from e
+
+            # remove the entry from sys.modules, we only want the attribute
+            # on the module object we have stored on the loader.
+            sys.modules.pop(module, None)
+
+        if globals is None:
+            globals = {}
+
+        return environment.template_class.from_module_dict(
+            environment, mod.__dict__, globals
+        )
diff --git a/venv/lib/python3.9/site-packages/jinja2/meta.py b/venv/lib/python3.9/site-packages/jinja2/meta.py
new file mode 100644
index 0000000..0057d6e
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/meta.py
@@ -0,0 +1,111 @@
+"""Functions that expose information about templates that might be
+interesting for introspection.
+"""
+import typing as t
+
+from . import nodes
+from .compiler import CodeGenerator
+from .compiler import Frame
+
+if t.TYPE_CHECKING:
+    from .environment import Environment
+
+
+class TrackingCodeGenerator(CodeGenerator):
+    """We abuse the code generator for introspection."""
+
+    def __init__(self, environment: "Environment") -> None:
+        super().__init__(environment, "<introspection>", "<introspection>")
+        self.undeclared_identifiers: t.Set[str] = set()
+
+    def write(self, x: str) -> None:
+        """Don't write."""
+
+    def enter_frame(self, frame: Frame) -> None:
+        """Remember all undeclared identifiers."""
+        super().enter_frame(frame)
+
+        for _, (action, param) in frame.symbols.loads.items():
+            if action == "resolve" and param not in self.environment.globals:
+                self.undeclared_identifiers.add(param)
+
+
+def find_undeclared_variables(ast: nodes.Template) -> t.Set[str]:
+    """Returns a set of all variables in the AST that will be looked up from
+    the context at runtime.  Because it is not known at compile time which
+    variables will actually be used (that depends on the path execution
+    takes at runtime), all variables are returned.
+
+    >>> from jinja2 import Environment, meta
+    >>> env = Environment()
+    >>> ast = env.parse('{% set foo = 42 %}{{ bar + foo }}')
+    >>> meta.find_undeclared_variables(ast) == {'bar'}
+    True
+
+    .. admonition:: Implementation
+
+       Internally the code generator is used for finding undeclared variables.
+       This is good to know because the code generator might raise a
+       :exc:`TemplateAssertionError` during compilation and as a matter of
+       fact this function can currently raise that exception as well.
+    """
+    codegen = TrackingCodeGenerator(ast.environment)  # type: ignore
+    codegen.visit(ast)
+    return codegen.undeclared_identifiers
+
+
+_ref_types = (nodes.Extends, nodes.FromImport, nodes.Import, nodes.Include)
+_RefType = t.Union[nodes.Extends, nodes.FromImport, nodes.Import, nodes.Include]
+
+
+def find_referenced_templates(ast: nodes.Template) -> t.Iterator[t.Optional[str]]:
+    """Finds all the referenced templates from the AST.  This will return an
+    iterator over all the hardcoded template extensions, inclusions and
+    imports.  If dynamic inheritance or inclusion is used, `None` will be
+    yielded.
+
+    >>> from jinja2 import Environment, meta
+    >>> env = Environment()
+    >>> ast = env.parse('{% extends "layout.html" %}{% include helper %}')
+    >>> list(meta.find_referenced_templates(ast))
+    ['layout.html', None]
+
+    This function is useful for dependency tracking.  For example if you want
+    to rebuild parts of the website after a layout template has changed.
+    """
+    template_name: t.Any
+
+    for node in ast.find_all(_ref_types):
+        template: nodes.Expr = node.template  # type: ignore
+
+        if not isinstance(template, nodes.Const):
+            # a tuple with some non-consts in it
+            if isinstance(template, (nodes.Tuple, nodes.List)):
+                for template_name in template.items:
+                    # something const: only yield the strings and ignore
+                    # non-string consts, which really just make no sense
+                    if isinstance(template_name, nodes.Const):
+                        if isinstance(template_name.value, str):
+                            yield template_name.value
+                    # something dynamic in there
+                    else:
+                        yield None
+            # something dynamic we don't know about here
+            else:
+                yield None
+            continue
+        # constant is a basestring, direct template name
+        if isinstance(template.value, str):
+            yield template.value
+        # a tuple or list (the latter *should* not happen) made of consts;
+        # yield the consts that are strings.  We could warn here about
+        # non-string values
+        elif isinstance(node, nodes.Include) and isinstance(
+            template.value, (tuple, list)
+        ):
+            for template_name in template.value:
+                if isinstance(template_name, str):
+                    yield template_name
+        # something else we don't care about, we could warn here
+        else:
+            yield None
diff --git a/venv/lib/python3.9/site-packages/jinja2/nativetypes.py b/venv/lib/python3.9/site-packages/jinja2/nativetypes.py
new file mode 100644
index 0000000..ac08610
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/nativetypes.py
@@ -0,0 +1,130 @@
+import typing as t
+from ast import literal_eval
+from ast import parse
+from itertools import chain
+from itertools import islice
+from types import GeneratorType
+
+from . import nodes
+from .compiler import CodeGenerator
+from .compiler import Frame
+from .compiler import has_safe_repr
+from .environment import Environment
+from .environment import Template
+
+
+def native_concat(values: t.Iterable[t.Any]) -> t.Optional[t.Any]:
+    """Return a native Python type from the list of compiled nodes. If
+    the result is a single node, its value is returned. Otherwise, the
+    nodes are concatenated as strings. If the result can be parsed with
+    :func:`ast.literal_eval`, the parsed value is returned. Otherwise,
+    the string is returned.
+
+    :param values: Iterable of outputs to concatenate.
+    """
+    head = list(islice(values, 2))
+
+    if not head:
+        return None
+
+    if len(head) == 1:
+        raw = head[0]
+        if not isinstance(raw, str):
+            return raw
+    else:
+        if isinstance(values, GeneratorType):
+            values = chain(head, values)
+        raw = "".join([str(v) for v in values])
+
+    try:
+        return literal_eval(
+            # In Python 3.10+ ast.literal_eval removes leading spaces/tabs
+            # from the given string. For backwards compatibility we need to
+            # parse the string ourselves without removing leading spaces/tabs.
+            parse(raw, mode="eval")
+        )
+    except (ValueError, SyntaxError, MemoryError):
+        return raw
+
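+# Illustrative sketch (derived from the code above):
+#
+#     native_concat(["1", "2"])           # -> 12 (int, via literal_eval)
+#     native_concat([[1, 2]])             # -> [1, 2] (single non-str passthrough)
+#     native_concat(["Hello ", "World"])  # -> "Hello World" (not parseable)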
+
+class NativeCodeGenerator(CodeGenerator):
+    """A code generator which renders Python types by not adding
+    ``str()`` around output nodes.
+    """
+
+    @staticmethod
+    def _default_finalize(value: t.Any) -> t.Any:
+        return value
+
+    def _output_const_repr(self, group: t.Iterable[t.Any]) -> str:
+        return repr("".join([str(v) for v in group]))
+
+    def _output_child_to_const(
+        self, node: nodes.Expr, frame: Frame, finalize: CodeGenerator._FinalizeInfo
+    ) -> t.Any:
+        const = node.as_const(frame.eval_ctx)
+
+        if not has_safe_repr(const):
+            raise nodes.Impossible()
+
+        if isinstance(node, nodes.TemplateData):
+            return const
+
+        return finalize.const(const)  # type: ignore
+
+    def _output_child_pre(
+        self, node: nodes.Expr, frame: Frame, finalize: CodeGenerator._FinalizeInfo
+    ) -> None:
+        if finalize.src is not None:
+            self.write(finalize.src)
+
+    def _output_child_post(
+        self, node: nodes.Expr, frame: Frame, finalize: CodeGenerator._FinalizeInfo
+    ) -> None:
+        if finalize.src is not None:
+            self.write(")")
+
+
+class NativeEnvironment(Environment):
+    """An environment that renders templates to native Python types."""
+
+    code_generator_class = NativeCodeGenerator
+    concat = staticmethod(native_concat)  # type: ignore
+
+
+class NativeTemplate(Template):
+    environment_class = NativeEnvironment
+
+    def render(self, *args: t.Any, **kwargs: t.Any) -> t.Any:
+        """Render the template to produce a native Python type. If the
+        result is a single node, its value is returned. Otherwise, the
+        nodes are concatenated as strings. If the result can be parsed
+        with :func:`ast.literal_eval`, the parsed value is returned.
+        Otherwise, the string is returned.
+        """
+        ctx = self.new_context(dict(*args, **kwargs))
+
+        try:
+            return self.environment_class.concat(  # type: ignore
+                self.root_render_func(ctx)  # type: ignore
+            )
+        except Exception:
+            return self.environment.handle_exception()
+
+    async def render_async(self, *args: t.Any, **kwargs: t.Any) -> t.Any:
+        if not self.environment.is_async:
+            raise RuntimeError(
+                "The environment was not created with async mode enabled."
+            )
+
+        ctx = self.new_context(dict(*args, **kwargs))
+
+        try:
+            return self.environment_class.concat(  # type: ignore
+                [n async for n in self.root_render_func(ctx)]  # type: ignore
+            )
+        except Exception:
+            return self.environment.handle_exception()
+
+
+NativeEnvironment.template_class = NativeTemplate
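+
+# Illustrative sketch: rendering returns native Python types instead of
+# strings, e.g.
+#
+#     env = NativeEnvironment()
+#     env.from_string("{{ x + 1 }}").render(x=4)              # -> 5 (int)
+#     env.from_string("[{{ a }}, {{ b }}]").render(a=1, b=2)  # -> [1, 2]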
diff --git a/venv/lib/python3.9/site-packages/jinja2/nodes.py b/venv/lib/python3.9/site-packages/jinja2/nodes.py
new file mode 100644
index 0000000..b2f88d9
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/nodes.py
@@ -0,0 +1,1204 @@
+"""AST nodes generated by the parser for the compiler. Also provides
+some node tree helper functions used by the parser and compiler in order
+to normalize nodes.
+"""
+import inspect
+import operator
+import typing as t
+from collections import deque
+
+from markupsafe import Markup
+
+from .utils import _PassArg
+
+if t.TYPE_CHECKING:
+    import typing_extensions as te
+    from .environment import Environment
+
+_NodeBound = t.TypeVar("_NodeBound", bound="Node")
+
+_binop_to_func: t.Dict[str, t.Callable[[t.Any, t.Any], t.Any]] = {
+    "*": operator.mul,
+    "/": operator.truediv,
+    "//": operator.floordiv,
+    "**": operator.pow,
+    "%": operator.mod,
+    "+": operator.add,
+    "-": operator.sub,
+}
+
+_uaop_to_func: t.Dict[str, t.Callable[[t.Any], t.Any]] = {
+    "not": operator.not_,
+    "+": operator.pos,
+    "-": operator.neg,
+}
+
+_cmpop_to_func: t.Dict[str, t.Callable[[t.Any, t.Any], t.Any]] = {
+    "eq": operator.eq,
+    "ne": operator.ne,
+    "gt": operator.gt,
+    "gteq": operator.ge,
+    "lt": operator.lt,
+    "lteq": operator.le,
+    "in": lambda a, b: a in b,
+    "notin": lambda a, b: a not in b,
+}
+
+
+class Impossible(Exception):
+    """Raised if the node could not perform a requested action."""
+
+
+class NodeType(type):
+    """A metaclass for nodes that handles the field and attribute
+    inheritance.  Fields and attributes from the parent class are
+    automatically forwarded to the child."""
+
+    def __new__(mcs, name, bases, d):  # type: ignore
+        for attr in "fields", "attributes":
+            storage = []
+            storage.extend(getattr(bases[0] if bases else object, attr, ()))
+            storage.extend(d.get(attr, ()))
+            assert len(bases) <= 1, "multiple inheritance not allowed"
+            assert len(storage) == len(set(storage)), "layout conflict"
+            d[attr] = tuple(storage)
+        d.setdefault("abstract", False)
+        return type.__new__(mcs, name, bases, d)
+
+
+class EvalContext:
+    """Holds evaluation time information.  Custom attributes can be attached
+    to it in extensions.
+    """
+
+    def __init__(
+        self, environment: "Environment", template_name: t.Optional[str] = None
+    ) -> None:
+        self.environment = environment
+        if callable(environment.autoescape):
+            self.autoescape = environment.autoescape(template_name)
+        else:
+            self.autoescape = environment.autoescape
+        self.volatile = False
+
+    def save(self) -> t.Mapping[str, t.Any]:
+        return self.__dict__.copy()
+
+    def revert(self, old: t.Mapping[str, t.Any]) -> None:
+        self.__dict__.clear()
+        self.__dict__.update(old)
+
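+# Illustrative sketch (an assumption about typical extension usage, not
+# from upstream docs): flags can be flipped temporarily and restored:
+#
+#     ctx = EvalContext(env)   # env: a jinja2 Environment
+#     old = ctx.save()
+#     ctx.volatile = True      # temporarily change evaluation flags
+#     ctx.revert(old)          # restore the saved state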
+
+def get_eval_context(node: "Node", ctx: t.Optional[EvalContext]) -> EvalContext:
+    if ctx is None:
+        if node.environment is None:
+            raise RuntimeError(
+                "if no eval context is passed, the node must have an"
+                " attached environment."
+            )
+        return EvalContext(node.environment)
+    return ctx
+
+
+class Node(metaclass=NodeType):
+    """Base class for all Jinja nodes.  There are a number of nodes available
+    of different types.  There are four major types:
+
+    -   :class:`Stmt`: statements
+    -   :class:`Expr`: expressions
+    -   :class:`Helper`: helper nodes
+    -   :class:`Template`: the outermost wrapper node
+
+    All nodes have fields and attributes.  Fields may be other nodes, lists,
+    or arbitrary values.  Fields are passed to the constructor as regular
+    positional arguments, attributes as keyword arguments.  Each node has
+    two attributes: `lineno` (the line number of the node) and `environment`.
+    The `environment` attribute is set at the end of the parsing process for
+    all nodes automatically.
+    """
+
+    fields: t.Tuple[str, ...] = ()
+    attributes: t.Tuple[str, ...] = ("lineno", "environment")
+    abstract = True
+
+    lineno: int
+    environment: t.Optional["Environment"]
+
+    def __init__(self, *fields: t.Any, **attributes: t.Any) -> None:
+        if self.abstract:
+            raise TypeError("abstract nodes are not instantiable")
+        if fields:
+            if len(fields) != len(self.fields):
+                if not self.fields:
+                    raise TypeError(f"{type(self).__name__!r} takes 0 arguments")
+                raise TypeError(
+                    f"{type(self).__name__!r} takes 0 or {len(self.fields)}"
+                    f" argument{'s' if len(self.fields) != 1 else ''}"
+                )
+            for name, arg in zip(self.fields, fields):
+                setattr(self, name, arg)
+        for attr in self.attributes:
+            setattr(self, attr, attributes.pop(attr, None))
+        if attributes:
+            raise TypeError(f"unknown attribute {next(iter(attributes))!r}")
+
+    def iter_fields(
+        self,
+        exclude: t.Optional[t.Container[str]] = None,
+        only: t.Optional[t.Container[str]] = None,
+    ) -> t.Iterator[t.Tuple[str, t.Any]]:
+        """This method iterates over all fields that are defined and yields
+        ``(key, value)`` tuples.  Per default all fields are returned, but
+        it's possible to limit that to some fields by providing the `only`
+        parameter or to exclude some using the `exclude` parameter.  Both
+        should be sets or tuples of field names.
+        """
+        for name in self.fields:
+            if (
+                (exclude is None and only is None)
+                or (exclude is not None and name not in exclude)
+                or (only is not None and name in only)
+            ):
+                try:
+                    yield name, getattr(self, name)
+                except AttributeError:
+                    pass
+
+    def iter_child_nodes(
+        self,
+        exclude: t.Optional[t.Container[str]] = None,
+        only: t.Optional[t.Container[str]] = None,
+    ) -> t.Iterator["Node"]:
+        """Iterates over all direct child nodes of the node.  This iterates
+        over all fields and yields the values if they are nodes.  If the value
+        of a field is a list all the nodes in that list are returned.
+        """
+        for _, item in self.iter_fields(exclude, only):
+            if isinstance(item, list):
+                for n in item:
+                    if isinstance(n, Node):
+                        yield n
+            elif isinstance(item, Node):
+                yield item
+
+    def find(self, node_type: t.Type[_NodeBound]) -> t.Optional[_NodeBound]:
+        """Find the first node of a given type.  If no such node exists the
+        return value is `None`.
+        """
+        for result in self.find_all(node_type):
+            return result
+
+        return None
+
+    def find_all(
+        self, node_type: t.Union[t.Type[_NodeBound], t.Tuple[t.Type[_NodeBound], ...]]
+    ) -> t.Iterator[_NodeBound]:
+        """Find all the nodes of a given type.  If the type is a tuple,
+        the check is performed for any of the tuple items.
+        """
+        for child in self.iter_child_nodes():
+            if isinstance(child, node_type):
+                yield child  # type: ignore
+            yield from child.find_all(node_type)
+
+    def set_ctx(self, ctx: str) -> "Node":
+        """Reset the context of a node and all child nodes.  Per default the
+        parser will all generate nodes that have a 'load' context as it's the
+        most common one.  This method is used in the parser to set assignment
+        targets and other nodes to a store context.
+        """
+        todo = deque([self])
+        while todo:
+            node = todo.popleft()
+            if "ctx" in node.fields:
+                node.ctx = ctx  # type: ignore
+            todo.extend(node.iter_child_nodes())
+        return self
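+    # Illustrative note (editor addition, not upstream Jinja source): the
+    # parser first builds every node with a 'load' ctx and later calls e.g.
+    # ``target.set_ctx("store")`` on assignment targets (see
+    # ``Parser.parse_assign_target``).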
+
+    def set_lineno(self, lineno: int, override: bool = False) -> "Node":
+        """Set the line numbers of the node and children."""
+        todo = deque([self])
+        while todo:
+            node = todo.popleft()
+            if "lineno" in node.attributes:
+                if node.lineno is None or override:
+                    node.lineno = lineno
+            todo.extend(node.iter_child_nodes())
+        return self
+
+    def set_environment(self, environment: "Environment") -> "Node":
+        """Set the environment for all nodes."""
+        todo = deque([self])
+        while todo:
+            node = todo.popleft()
+            node.environment = environment
+            todo.extend(node.iter_child_nodes())
+        return self
+
+    def __eq__(self, other: t.Any) -> bool:
+        if type(self) is not type(other):
+            return NotImplemented
+
+        return tuple(self.iter_fields()) == tuple(other.iter_fields())
+
+    __hash__ = object.__hash__
+
+    def __repr__(self) -> str:
+        args_str = ", ".join(f"{a}={getattr(self, a, None)!r}" for a in self.fields)
+        return f"{type(self).__name__}({args_str})"
+
+    def dump(self) -> str:
+        def _dump(node: t.Union[Node, t.Any]) -> None:
+            if not isinstance(node, Node):
+                buf.append(repr(node))
+                return
+
+            buf.append(f"nodes.{type(node).__name__}(")
+            if not node.fields:
+                buf.append(")")
+                return
+            for idx, field in enumerate(node.fields):
+                if idx:
+                    buf.append(", ")
+                value = getattr(node, field)
+                if isinstance(value, list):
+                    buf.append("[")
+                    for idx, item in enumerate(value):
+                        if idx:
+                            buf.append(", ")
+                        _dump(item)
+                    buf.append("]")
+                else:
+                    _dump(value)
+            buf.append(")")
+
+        buf: t.List[str] = []
+        _dump(self)
+        return "".join(buf)
+
+
+class Stmt(Node):
+    """Base node for all statements."""
+
+    abstract = True
+
+
+class Helper(Node):
+    """Nodes that exist in a specific context only."""
+
+    abstract = True
+
+
+class Template(Node):
+    """Node that represents a template.  This must be the outermost node that
+    is passed to the compiler.
+    """
+
+    fields = ("body",)
+    body: t.List[Node]
+
+
+class Output(Stmt):
+    """A node that holds multiple expressions which are then printed out.
+    This is used both for the `print` statement and the regular template data.
+    """
+
+    fields = ("nodes",)
+    nodes: t.List["Expr"]
+
+
+class Extends(Stmt):
+    """Represents an extends statement."""
+
+    fields = ("template",)
+    template: "Expr"
+
+
+class For(Stmt):
+    """The for loop.  `target` is the target for the iteration (usually a
+    :class:`Name` or :class:`Tuple`), `iter` the iterable.  `body` is a list
+    of nodes that are used as loop-body, and `else_` a list of nodes for the
+    `else` block.  If no else node exists it has to be an empty list.
+
+    For filtered nodes an expression can be stored as `test`, otherwise `None`.
+    """
+
+    fields = ("target", "iter", "body", "else_", "test", "recursive")
+    target: Node
+    iter: Node
+    body: t.List[Node]
+    else_: t.List[Node]
+    test: t.Optional[Node]
+    recursive: bool
+
+
+class If(Stmt):
+    """If `test` is true, `body` is rendered, else `else_`."""
+
+    fields = ("test", "body", "elif_", "else_")
+    test: Node
+    body: t.List[Node]
+    elif_: t.List["If"]
+    else_: t.List[Node]
+
+
+class Macro(Stmt):
+    """A macro definition.  `name` is the name of the macro, `args` a list of
+    arguments and `defaults` a list of defaults if there are any.  `body` is
+    a list of nodes for the macro body.
+    """
+
+    fields = ("name", "args", "defaults", "body")
+    name: str
+    args: t.List["Name"]
+    defaults: t.List["Expr"]
+    body: t.List[Node]
+
+
+class CallBlock(Stmt):
+    """Like a macro without a name but a call instead.  `call` is called with
+    the unnamed macro as `caller` argument this node holds.
+    """
+
+    fields = ("call", "args", "defaults", "body")
+    call: "Call"
+    args: t.List["Name"]
+    defaults: t.List["Expr"]
+    body: t.List[Node]
+
+
+class FilterBlock(Stmt):
+    """Node for filter sections."""
+
+    fields = ("body", "filter")
+    body: t.List[Node]
+    filter: "Filter"
+
+
+class With(Stmt):
+    """Specific node for with statements.  In older versions of Jinja the
+    with statement was implemented on the base of the `Scope` node instead.
+
+    .. versionadded:: 2.9.3
+    """
+
+    fields = ("targets", "values", "body")
+    targets: t.List["Expr"]
+    values: t.List["Expr"]
+    body: t.List[Node]
+
+
+class Block(Stmt):
+    """A node that represents a block.
+
+    .. versionchanged:: 3.0.0
+        the `required` field was added.
+    """
+
+    fields = ("name", "body", "scoped", "required")
+    name: str
+    body: t.List[Node]
+    scoped: bool
+    required: bool
+
+
+class Include(Stmt):
+    """A node that represents the include tag."""
+
+    fields = ("template", "with_context", "ignore_missing")
+    template: "Expr"
+    with_context: bool
+    ignore_missing: bool
+
+
+class Import(Stmt):
+    """A node that represents the import tag."""
+
+    fields = ("template", "target", "with_context")
+    template: "Expr"
+    target: str
+    with_context: bool
+
+
+class FromImport(Stmt):
+    """A node that represents the from import tag.  It's important to not
+    pass unsafe names to the name attribute.  The compiler translates the
+    attribute lookups directly into getattr calls and does *not* use the
+    subscript callback of the interface.  As exported variables may not
+    start with double underscores (which the parser asserts) this is not a
+    problem for regular Jinja code, but if this node is used in an extension
+    extra care must be taken.
+
+    The list of names may contain tuples if aliases are wanted.
+    """
+
+    fields = ("template", "names", "with_context")
+    template: "Expr"
+    names: t.List[t.Union[str, t.Tuple[str, str]]]
+    with_context: bool
+
+
+class ExprStmt(Stmt):
+    """A statement that evaluates an expression and discards the result."""
+
+    fields = ("node",)
+    node: Node
+
+
+class Assign(Stmt):
+    """Assigns an expression to a target."""
+
+    fields = ("target", "node")
+    target: "Expr"
+    node: Node
+
+
+class AssignBlock(Stmt):
+    """Assigns a block to a target."""
+
+    fields = ("target", "filter", "body")
+    target: "Expr"
+    filter: t.Optional["Filter"]
+    body: t.List[Node]
+
+
+class Expr(Node):
+    """Baseclass for all expressions."""
+
+    abstract = True
+
+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+        """Return the value of the expression as constant or raise
+        :exc:`Impossible` if this was not possible.
+
+        An :class:`EvalContext` can be provided, if none is given
+        a default context is created which requires the nodes to have
+        an attached environment.
+
+        .. versionchanged:: 2.4
+           the `eval_ctx` parameter was added.
+        """
+        raise Impossible()
+
+    def can_assign(self) -> bool:
+        """Check if it's possible to assign something to this node."""
+        return False
+
+
+class BinExpr(Expr):
+    """Baseclass for all binary expressions."""
+
+    fields = ("left", "right")
+    left: Expr
+    right: Expr
+    operator: str
+    abstract = True
+
+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+        eval_ctx = get_eval_context(self, eval_ctx)
+
+        # intercepted operators cannot be folded at compile time
+        if (
+            eval_ctx.environment.sandboxed
+            and self.operator in eval_ctx.environment.intercepted_binops  # type: ignore
+        ):
+            raise Impossible()
+        f = _binop_to_func[self.operator]
+        try:
+            return f(self.left.as_const(eval_ctx), self.right.as_const(eval_ctx))
+        except Exception as e:
+            raise Impossible() from e
+
+
+class UnaryExpr(Expr):
+    """Baseclass for all unary expressions."""
+
+    fields = ("node",)
+    node: Expr
+    operator: str
+    abstract = True
+
+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+        eval_ctx = get_eval_context(self, eval_ctx)
+
+        # intercepted operators cannot be folded at compile time
+        if (
+            eval_ctx.environment.sandboxed
+            and self.operator in eval_ctx.environment.intercepted_unops  # type: ignore
+        ):
+            raise Impossible()
+        f = _uaop_to_func[self.operator]
+        try:
+            return f(self.node.as_const(eval_ctx))
+        except Exception as e:
+            raise Impossible() from e
+
+
+class Name(Expr):
+    """Looks up a name or stores a value in a name.
+    The `ctx` of the node can be one of the following values:
+
+    -   `store`: store a value in the name
+    -   `load`: load that name
+    -   `param`: like `store`, used when the name is defined as a function parameter.
+    """
+
+    fields = ("name", "ctx")
+    name: str
+    ctx: str
+
+    def can_assign(self) -> bool:
+        return self.name not in {"true", "false", "none", "True", "False", "None"}
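+    # Illustrative note (editor addition, not upstream Jinja source): this is
+    # why ``{% set true = 1 %}`` is rejected -- the reserved constant names
+    # above are never valid assignment targets.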
+
+
+class NSRef(Expr):
+    """Reference to a namespace value assignment"""
+
+    fields = ("name", "attr")
+    name: str
+    attr: str
+
+    def can_assign(self) -> bool:
+        # We don't need any special checks here; NSRef assignments have a
+        # runtime check to ensure the target is a namespace object which will
+        # have been checked already as it is created using a normal assignment
+        # which goes through a `Name` node.
+        return True
+
+
+class Literal(Expr):
+    """Baseclass for literals."""
+
+    abstract = True
+
+
+class Const(Literal):
+    """All constant values.  The parser will return this node for simple
+    constants such as ``42`` or ``"foo"`` but it can be used to store more
+    complex values such as lists too.  Only constants with a safe
+    representation (objects where ``eval(repr(x)) == x`` is true) can be
+    stored.
+    """
+
+    fields = ("value",)
+    value: t.Any
+
+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+        return self.value
+
+    @classmethod
+    def from_untrusted(
+        cls,
+        value: t.Any,
+        lineno: t.Optional[int] = None,
+        environment: "t.Optional[Environment]" = None,
+    ) -> "Const":
+        """Return a const object if the value is representable as
+        constant value in the generated code, otherwise it will raise
+        an `Impossible` exception.
+        """
+        from .compiler import has_safe_repr
+
+        if not has_safe_repr(value):
+            raise Impossible()
+        return cls(value, lineno=lineno, environment=environment)
+
+
+class TemplateData(Literal):
+    """A constant template string."""
+
+    fields = ("data",)
+    data: str
+
+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> str:
+        eval_ctx = get_eval_context(self, eval_ctx)
+        if eval_ctx.volatile:
+            raise Impossible()
+        if eval_ctx.autoescape:
+            return Markup(self.data)
+        return self.data
+
+
+class Tuple(Literal):
+    """For loop unpacking and some other things like multiple arguments
+    for subscripts.  As with :class:`Name`, `ctx` specifies whether the
+    tuple is used for loading or storing names.
+    """
+
+    fields = ("items", "ctx")
+    items: t.List[Expr]
+    ctx: str
+
+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Tuple[t.Any, ...]:
+        eval_ctx = get_eval_context(self, eval_ctx)
+        return tuple(x.as_const(eval_ctx) for x in self.items)
+
+    def can_assign(self) -> bool:
+        for item in self.items:
+            if not item.can_assign():
+                return False
+        return True
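+    # Illustrative note (editor addition, not upstream Jinja source): a tuple
+    # of names such as ``(a, b)`` is assignable, but one containing a literal
+    # is not, since ``Expr.can_assign`` defaults to False.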
+
+
+class List(Literal):
+    """Any list literal such as ``[1, 2, 3]``"""
+
+    fields = ("items",)
+    items: t.List[Expr]
+
+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.List[t.Any]:
+        eval_ctx = get_eval_context(self, eval_ctx)
+        return [x.as_const(eval_ctx) for x in self.items]
+
+
+class Dict(Literal):
+    """Any dict literal such as ``{1: 2, 3: 4}``.  The items must be a list of
+    :class:`Pair` nodes.
+    """
+
+    fields = ("items",)
+    items: t.List["Pair"]
+
+    def as_const(
+        self, eval_ctx: t.Optional[EvalContext] = None
+    ) -> t.Dict[t.Any, t.Any]:
+        eval_ctx = get_eval_context(self, eval_ctx)
+        return dict(x.as_const(eval_ctx) for x in self.items)
+
+
+class Pair(Helper):
+    """A key, value pair for dicts."""
+
+    fields = ("key", "value")
+    key: Expr
+    value: Expr
+
+    def as_const(
+        self, eval_ctx: t.Optional[EvalContext] = None
+    ) -> t.Tuple[t.Any, t.Any]:
+        eval_ctx = get_eval_context(self, eval_ctx)
+        return self.key.as_const(eval_ctx), self.value.as_const(eval_ctx)
+
+
+class Keyword(Helper):
+    """A key, value pair for keyword arguments where key is a string."""
+
+    fields = ("key", "value")
+    key: str
+    value: Expr
+
+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Tuple[str, t.Any]:
+        eval_ctx = get_eval_context(self, eval_ctx)
+        return self.key, self.value.as_const(eval_ctx)
+
+
+class CondExpr(Expr):
+    """A conditional expression (inline if expression).  (``{{
+    foo if bar else baz }}``)
+    """
+
+    fields = ("test", "expr1", "expr2")
+    test: Expr
+    expr1: Expr
+    expr2: t.Optional[Expr]
+
+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+        eval_ctx = get_eval_context(self, eval_ctx)
+        if self.test.as_const(eval_ctx):
+            return self.expr1.as_const(eval_ctx)
+
+        # if we evaluate to an undefined object, we better do that at runtime
+        if self.expr2 is None:
+            raise Impossible()
+
+        return self.expr2.as_const(eval_ctx)
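+    # Folding sketch (editor addition, not upstream Jinja source): with an
+    # attached environment,
+    #   CondExpr(Const(True), Const("a"), Const("b")).as_const() == "a"
+    # whereas a false test with no ``else`` branch raises Impossible so the
+    # undefined value is produced at runtime instead.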
+
+
+def args_as_const(
+    node: t.Union["_FilterTestCommon", "Call"], eval_ctx: t.Optional[EvalContext]
+) -> t.Tuple[t.List[t.Any], t.Dict[t.Any, t.Any]]:
+    args = [x.as_const(eval_ctx) for x in node.args]
+    kwargs = dict(x.as_const(eval_ctx) for x in node.kwargs)
+
+    if node.dyn_args is not None:
+        try:
+            args.extend(node.dyn_args.as_const(eval_ctx))
+        except Exception as e:
+            raise Impossible() from e
+
+    if node.dyn_kwargs is not None:
+        try:
+            kwargs.update(node.dyn_kwargs.as_const(eval_ctx))
+        except Exception as e:
+            raise Impossible() from e
+
+    return args, kwargs
+
+
+class _FilterTestCommon(Expr):
+    fields = ("node", "name", "args", "kwargs", "dyn_args", "dyn_kwargs")
+    node: Expr
+    name: str
+    args: t.List[Expr]
+    kwargs: t.List[Pair]
+    dyn_args: t.Optional[Expr]
+    dyn_kwargs: t.Optional[Expr]
+    abstract = True
+    _is_filter = True
+
+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+        eval_ctx = get_eval_context(self, eval_ctx)
+
+        if eval_ctx.volatile:
+            raise Impossible()
+
+        if self._is_filter:
+            env_map = eval_ctx.environment.filters
+        else:
+            env_map = eval_ctx.environment.tests
+
+        func = env_map.get(self.name)
+        pass_arg = _PassArg.from_obj(func)  # type: ignore
+
+        if func is None or pass_arg is _PassArg.context:
+            raise Impossible()
+
+        if eval_ctx.environment.is_async and (
+            getattr(func, "jinja_async_variant", False) is True
+            or inspect.iscoroutinefunction(func)
+        ):
+            raise Impossible()
+
+        args, kwargs = args_as_const(self, eval_ctx)
+        args.insert(0, self.node.as_const(eval_ctx))
+
+        if pass_arg is _PassArg.eval_context:
+            args.insert(0, eval_ctx)
+        elif pass_arg is _PassArg.environment:
+            args.insert(0, eval_ctx.environment)
+
+        try:
+            return func(*args, **kwargs)
+        except Exception as e:
+            raise Impossible() from e
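+    # Folding sketch (editor addition, not upstream Jinja source): a plain
+    # filter such as ``upper`` can be folded, so ``{{ "abc"|upper }}``
+    # becomes "ABC" at compile time; context-passing or async-only filters
+    # raise Impossible above and are evaluated at runtime instead.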
+
+
+class Filter(_FilterTestCommon):
+    """Apply a filter to an expression. ``name`` is the name of the
+    filter, the other fields are the same as :class:`Call`.
+
+    If ``node`` is ``None``, the filter is being used in a filter block
+    and is applied to the content of the block.
+    """
+
+    node: t.Optional[Expr]  # type: ignore
+
+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+        if self.node is None:
+            raise Impossible()
+
+        return super().as_const(eval_ctx=eval_ctx)
+
+
+class Test(_FilterTestCommon):
+    """Apply a test to an expression. ``name`` is the name of the test,
+    the other fields are the same as :class:`Call`.
+
+    .. versionchanged:: 3.0
+        ``as_const`` shares the same logic for filters and tests. Tests
+        check for volatile, async, and ``@pass_context`` etc.
+        decorators.
+    """
+
+    _is_filter = False
+
+
+class Call(Expr):
+    """Calls an expression.  `args` is a list of arguments, `kwargs` a list
+    of keyword arguments (list of :class:`Keyword` nodes), and `dyn_args`
+    and `dyn_kwargs` have to be either `None` or a node that is used as
+    the node for dynamic positional (``*args``) or keyword (``**kwargs``)
+    arguments.
+    """
+
+    fields = ("node", "args", "kwargs", "dyn_args", "dyn_kwargs")
+    node: Expr
+    args: t.List[Expr]
+    kwargs: t.List[Keyword]
+    dyn_args: t.Optional[Expr]
+    dyn_kwargs: t.Optional[Expr]
+
+
+class Getitem(Expr):
+    """Get an attribute or item from an expression and prefer the item."""
+
+    fields = ("node", "arg", "ctx")
+    node: Expr
+    arg: Expr
+    ctx: str
+
+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+        if self.ctx != "load":
+            raise Impossible()
+
+        eval_ctx = get_eval_context(self, eval_ctx)
+
+        try:
+            return eval_ctx.environment.getitem(
+                self.node.as_const(eval_ctx), self.arg.as_const(eval_ctx)
+            )
+        except Exception as e:
+            raise Impossible() from e
+
+
+class Getattr(Expr):
+    """Get an attribute or item from an expression that is a ascii-only
+    bytestring and prefer the attribute.
+    """
+
+    fields = ("node", "attr", "ctx")
+    node: Expr
+    attr: str
+    ctx: str
+
+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+        if self.ctx != "load":
+            raise Impossible()
+
+        eval_ctx = get_eval_context(self, eval_ctx)
+
+        try:
+            return eval_ctx.environment.getattr(self.node.as_const(eval_ctx), self.attr)
+        except Exception as e:
+            raise Impossible() from e
+
+
+class Slice(Expr):
+    """Represents a slice object.  This must only be used as argument for
+    :class:`Subscript`.
+    """
+
+    fields = ("start", "stop", "step")
+    start: t.Optional[Expr]
+    stop: t.Optional[Expr]
+    step: t.Optional[Expr]
+
+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> slice:
+        eval_ctx = get_eval_context(self, eval_ctx)
+
+        def const(obj: t.Optional[Expr]) -> t.Optional[t.Any]:
+            if obj is None:
+                return None
+            return obj.as_const(eval_ctx)
+
+        return slice(const(self.start), const(self.stop), const(self.step))
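+    # Folding sketch (editor addition, not upstream Jinja source): given an
+    # eval context ``ctx``,
+    #   Slice(Const(1), Const(5), None).as_const(ctx) == slice(1, 5, None)
+    # which is the argument ``x[1:5]`` passes to :class:`Getitem`.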
+
+
+class Concat(Expr):
+    """Concatenates the list of expressions provided after converting
+    them to strings.
+    """
+
+    fields = ("nodes",)
+    nodes: t.List[Expr]
+
+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> str:
+        eval_ctx = get_eval_context(self, eval_ctx)
+        return "".join(str(x.as_const(eval_ctx)) for x in self.nodes)
+
+
+class Compare(Expr):
+    """Compares an expression with some other expressions.  `ops` must be a
+    list of :class:`Operand`\\s.
+    """
+
+    fields = ("expr", "ops")
+    expr: Expr
+    ops: t.List["Operand"]
+
+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+        eval_ctx = get_eval_context(self, eval_ctx)
+        result = value = self.expr.as_const(eval_ctx)
+
+        try:
+            for op in self.ops:
+                new_value = op.expr.as_const(eval_ctx)
+                result = _cmpop_to_func[op.op](value, new_value)
+
+                if not result:
+                    return False
+
+                value = new_value
+        except Exception as e:
+            raise Impossible() from e
+
+        return result
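+    # Folding sketch (editor addition, not upstream Jinja source): chained
+    # comparisons short-circuit like Python's, so ``1 < 2 < 3`` folds
+    # pairwise to True while ``1 < 2 > 3`` stops at the failing ``2 > 3``.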
+
+
+class Operand(Helper):
+    """Holds an operator and an expression."""
+
+    fields = ("op", "expr")
+    op: str
+    expr: Expr
+
+
+class Mul(BinExpr):
+    """Multiplies the left with the right node."""
+
+    operator = "*"
+
+
+class Div(BinExpr):
+    """Divides the left by the right node."""
+
+    operator = "/"
+
+
+class FloorDiv(BinExpr):
+    """Divides the left by the right node and converts the
+    result into an integer by truncating.
+    """
+
+    operator = "//"
+
+
+class Add(BinExpr):
+    """Add the left to the right node."""
+
+    operator = "+"
+
+
+class Sub(BinExpr):
+    """Subtract the right from the left node."""
+
+    operator = "-"
+
+
+class Mod(BinExpr):
+    """Left modulo right."""
+
+    operator = "%"
+
+
+class Pow(BinExpr):
+    """Left to the power of right."""
+
+    operator = "**"
+
+
+class And(BinExpr):
+    """Short circuited AND."""
+
+    operator = "and"
+
+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+        eval_ctx = get_eval_context(self, eval_ctx)
+        return self.left.as_const(eval_ctx) and self.right.as_const(eval_ctx)
+
+
+class Or(BinExpr):
+    """Short circuited OR."""
+
+    operator = "or"
+
+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
+        eval_ctx = get_eval_context(self, eval_ctx)
+        return self.left.as_const(eval_ctx) or self.right.as_const(eval_ctx)
+
+
+class Not(UnaryExpr):
+    """Negate the expression."""
+
+    operator = "not"
+
+
+class Neg(UnaryExpr):
+    """Make the expression negative."""
+
+    operator = "-"
+
+
+class Pos(UnaryExpr):
+    """Make the expression positive (noop for most expressions)"""
+
+    operator = "+"
+
+
+# Helpers for extensions
+
+
+class EnvironmentAttribute(Expr):
+    """Loads an attribute from the environment object.  This is useful for
+    extensions that want to call a callback stored on the environment.
+    """
+
+    fields = ("name",)
+    name: str
+
+
+class ExtensionAttribute(Expr):
+    """Returns the attribute of an extension bound to the environment.
+    The identifier is the identifier of the :class:`Extension`.
+
+    This node is usually constructed by calling the
+    :meth:`~jinja2.ext.Extension.attr` method on an extension.
+    """
+
+    fields = ("identifier", "name")
+    identifier: str
+    name: str
+
+
+class ImportedName(Expr):
+    """If created with an import name the import name is returned on node
+    access.  For example ``ImportedName('cgi.escape')`` returns the `escape`
+    function from the cgi module on evaluation.  Imports are optimized by the
+    compiler so there is no need to assign them to local variables.
+    """
+
+    fields = ("importname",)
+    importname: str
+
+
+class InternalName(Expr):
+    """An internal name in the compiler.  You cannot create these nodes
+    yourself but the parser provides a
+    :meth:`~jinja2.parser.Parser.free_identifier` method that creates
+    a new identifier for you.  This identifier is not available from the
+    template and is not treated specially by the compiler.
+    """
+
+    fields = ("name",)
+    name: str
+
+    def __init__(self) -> None:
+        raise TypeError(
+            "Can't create internal names.  Use the "
+            "`free_identifier` method on a parser."
+        )
+
+
+class MarkSafe(Expr):
+    """Mark the wrapped expression as safe (wrap it as `Markup`)."""
+
+    fields = ("expr",)
+    expr: Expr
+
+    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> Markup:
+        eval_ctx = get_eval_context(self, eval_ctx)
+        return Markup(self.expr.as_const(eval_ctx))
+
+
+class MarkSafeIfAutoescape(Expr):
+    """Mark the wrapped expression as safe (wrap it as `Markup`) but
+    only if autoescaping is active.
+
+    .. versionadded:: 2.5
+    """
+
+    fields = ("expr",)
+    expr: Expr
+
+    def as_const(
+        self, eval_ctx: t.Optional[EvalContext] = None
+    ) -> t.Union[Markup, t.Any]:
+        eval_ctx = get_eval_context(self, eval_ctx)
+        if eval_ctx.volatile:
+            raise Impossible()
+        expr = self.expr.as_const(eval_ctx)
+        if eval_ctx.autoescape:
+            return Markup(expr)
+        return expr
+
+
+class ContextReference(Expr):
+    """Returns the current template context.  It can be used like a
+    :class:`Name` node, with a ``'load'`` ctx and will return the
+    current :class:`~jinja2.runtime.Context` object.
+
+    Here is an example that assigns the current template name to a
+    variable named `foo`::
+
+        Assign(Name('foo', ctx='store'),
+               Getattr(ContextReference(), 'name'))
+
+    This is basically equivalent to using the
+    :func:`~jinja2.pass_context` decorator when using the high-level
+    API, which causes a reference to the context to be passed as the
+    first argument to a function.
+    """
+
+
+class DerivedContextReference(Expr):
+    """Return the current template context including locals. Behaves
+    exactly like :class:`ContextReference`, but includes local
+    variables, such as from a ``for`` loop.
+
+    .. versionadded:: 2.11
+    """
+
+
+class Continue(Stmt):
+    """Continue a loop."""
+
+
+class Break(Stmt):
+    """Break a loop."""
+
+
+class Scope(Stmt):
+    """An artificial scope."""
+
+    fields = ("body",)
+    body: t.List[Node]
+
+
+class OverlayScope(Stmt):
+    """An overlay scope for extensions.  This is a largely unoptimized scope
+    that can, however, be used to introduce completely arbitrary variables into
+    a sub scope from a dictionary or dictionary like object.  The `context`
+    field has to evaluate to a dictionary object.
+
+    Example usage::
+
+        OverlayScope(context=self.call_method('get_context'),
+                     body=[...])
+
+    .. versionadded:: 2.10
+    """
+
+    fields = ("context", "body")
+    context: Expr
+    body: t.List[Node]
+
+
+class EvalContextModifier(Stmt):
+    """Modifies the eval context.  For each option that should be modified,
+    a :class:`Keyword` has to be added to the :attr:`options` list.
+
+    Example to change the `autoescape` setting::
+
+        EvalContextModifier(options=[Keyword('autoescape', Const(True))])
+    """
+
+    fields = ("options",)
+    options: t.List[Keyword]
+
+
+class ScopedEvalContextModifier(EvalContextModifier):
+    """Modifies the eval context and reverts it later.  Works exactly like
+    :class:`EvalContextModifier` but will only modify the
+    :class:`~jinja2.nodes.EvalContext` for nodes in the :attr:`body`.
+    """
+
+    fields = ("body",)
+    body: t.List[Node]
+
+
+# make sure nobody creates custom nodes
+def _failing_new(*args: t.Any, **kwargs: t.Any) -> "te.NoReturn":
+    raise TypeError("can't create custom node types")
+
+
+NodeType.__new__ = staticmethod(_failing_new)  # type: ignore
+del _failing_new
diff --git a/venv/lib/python3.9/site-packages/jinja2/optimizer.py b/venv/lib/python3.9/site-packages/jinja2/optimizer.py
new file mode 100644
index 0000000..fe10107
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/optimizer.py
@@ -0,0 +1,47 @@
+"""The optimizer tries to constant fold expressions and modify the AST
+in place so that it should be faster to evaluate.
+
+Because the AST does not contain all the scoping information and the
+compiler has to find that out, we cannot do all the optimizations we
+want. For example, loop unrolling doesn't work because unrolled loops
+would have a different scope. The solution would be a second syntax tree
+that stored the scoping rules.
+"""
+import typing as t
+
+from . import nodes
+from .visitor import NodeTransformer
+
+if t.TYPE_CHECKING:
+    from .environment import Environment
+
+
+def optimize(node: nodes.Node, environment: "Environment") -> nodes.Node:
+    """The context hint can be used to perform an static optimization
+    based on the context given."""
+    optimizer = Optimizer(environment)
+    return t.cast(nodes.Node, optimizer.visit(node))
+
+
+class Optimizer(NodeTransformer):
+    def __init__(self, environment: "t.Optional[Environment]") -> None:
+        self.environment = environment
+
+    def generic_visit(
+        self, node: nodes.Node, *args: t.Any, **kwargs: t.Any
+    ) -> nodes.Node:
+        node = super().generic_visit(node, *args, **kwargs)
+
+        # Do constant folding. Some other nodes besides Expr have
+        # as_const, but folding them causes errors later on.
+        if isinstance(node, nodes.Expr):
+            try:
+                return nodes.Const.from_untrusted(
+                    node.as_const(args[0] if args else None),
+                    lineno=node.lineno,
+                    environment=self.environment,
+                )
+            except nodes.Impossible:
+                pass
+
+        return node
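+# Usage sketch (editor addition, not upstream Jinja source): folding is
+# normally triggered from the compiler, but can be exercised directly:
+#
+#   from jinja2 import Environment
+#   env = Environment()
+#   tree = env.parse("{{ 1 + 2 }}")
+#   optimize(tree, env).body[0].nodes[0]  # nodes.Const(value=3)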
diff --git a/venv/lib/python3.9/site-packages/jinja2/parser.py b/venv/lib/python3.9/site-packages/jinja2/parser.py
new file mode 100644
index 0000000..cefce2d
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/parser.py
@@ -0,0 +1,1032 @@
+"""Parse tokens from the lexer into nodes for the compiler."""
+import typing
+import typing as t
+
+from . import nodes
+from .exceptions import TemplateAssertionError
+from .exceptions import TemplateSyntaxError
+from .lexer import describe_token
+from .lexer import describe_token_expr
+
+if t.TYPE_CHECKING:
+    import typing_extensions as te
+    from .environment import Environment
+
+_ImportInclude = t.TypeVar("_ImportInclude", nodes.Import, nodes.Include)
+_MacroCall = t.TypeVar("_MacroCall", nodes.Macro, nodes.CallBlock)
+
+_statement_keywords = frozenset(
+    [
+        "for",
+        "if",
+        "block",
+        "extends",
+        "print",
+        "macro",
+        "include",
+        "from",
+        "import",
+        "set",
+        "with",
+        "autoescape",
+    ]
+)
+_compare_operators = frozenset(["eq", "ne", "lt", "lteq", "gt", "gteq"])
+
+_math_nodes: t.Dict[str, t.Type[nodes.Expr]] = {
+    "add": nodes.Add,
+    "sub": nodes.Sub,
+    "mul": nodes.Mul,
+    "div": nodes.Div,
+    "floordiv": nodes.FloorDiv,
+    "mod": nodes.Mod,
+}
+
+
+class Parser:
+    """This is the central parsing class Jinja uses.  It's passed to
+    extensions and can be used to parse expressions or statements.
+    """
+
+    def __init__(
+        self,
+        environment: "Environment",
+        source: str,
+        name: t.Optional[str] = None,
+        filename: t.Optional[str] = None,
+        state: t.Optional[str] = None,
+    ) -> None:
+        self.environment = environment
+        self.stream = environment._tokenize(source, name, filename, state)
+        self.name = name
+        self.filename = filename
+        self.closed = False
+        self.extensions: t.Dict[
+            str, t.Callable[["Parser"], t.Union[nodes.Node, t.List[nodes.Node]]]
+        ] = {}
+        for extension in environment.iter_extensions():
+            for tag in extension.tags:
+                self.extensions[tag] = extension.parse
+        self._last_identifier = 0
+        self._tag_stack: t.List[str] = []
+        self._end_token_stack: t.List[t.Tuple[str, ...]] = []
+
+    def fail(
+        self,
+        msg: str,
+        lineno: t.Optional[int] = None,
+        exc: t.Type[TemplateSyntaxError] = TemplateSyntaxError,
+    ) -> "te.NoReturn":
+        """Convenience method that raises `exc` with the message, passed
+        line number or last line number as well as the current name and
+        filename.
+        """
+        if lineno is None:
+            lineno = self.stream.current.lineno
+        raise exc(msg, lineno, self.name, self.filename)
+
+    def _fail_ut_eof(
+        self,
+        name: t.Optional[str],
+        end_token_stack: t.List[t.Tuple[str, ...]],
+        lineno: t.Optional[int],
+    ) -> "te.NoReturn":
+        expected: t.Set[str] = set()
+        for exprs in end_token_stack:
+            expected.update(map(describe_token_expr, exprs))
+        if end_token_stack:
+            currently_looking: t.Optional[str] = " or ".join(
+                map(repr, map(describe_token_expr, end_token_stack[-1]))
+            )
+        else:
+            currently_looking = None
+
+        if name is None:
+            message = ["Unexpected end of template."]
+        else:
+            message = [f"Encountered unknown tag {name!r}."]
+
+        if currently_looking:
+            if name is not None and name in expected:
+                message.append(
+                    "You probably made a nesting mistake. Jinja is expecting this tag,"
+                    f" but currently looking for {currently_looking}."
+                )
+            else:
+                message.append(
+                    f"Jinja was looking for the following tags: {currently_looking}."
+                )
+
+        if self._tag_stack:
+            message.append(
+                "The innermost block that needs to be closed is"
+                f" {self._tag_stack[-1]!r}."
+            )
+
+        self.fail(" ".join(message), lineno)
+
+    def fail_unknown_tag(
+        self, name: str, lineno: t.Optional[int] = None
+    ) -> "te.NoReturn":
+        """Called if the parser encounters an unknown tag.  Tries to fail
+        with a human readable error message that could help to identify
+        the problem.
+        """
+        self._fail_ut_eof(name, self._end_token_stack, lineno)
+
+    def fail_eof(
+        self,
+        end_tokens: t.Optional[t.Tuple[str, ...]] = None,
+        lineno: t.Optional[int] = None,
+    ) -> "te.NoReturn":
+        """Like fail_unknown_tag but for end of template situations."""
+        stack = list(self._end_token_stack)
+        if end_tokens is not None:
+            stack.append(end_tokens)
+        self._fail_ut_eof(None, stack, lineno)
+
+    def is_tuple_end(
+        self, extra_end_rules: t.Optional[t.Tuple[str, ...]] = None
+    ) -> bool:
+        """Are we at the end of a tuple?"""
+        if self.stream.current.type in ("variable_end", "block_end", "rparen"):
+            return True
+        elif extra_end_rules is not None:
+            return self.stream.current.test_any(extra_end_rules)  # type: ignore
+        return False
+
+    def free_identifier(self, lineno: t.Optional[int] = None) -> nodes.InternalName:
+        """Return a new free identifier as :class:`~jinja2.nodes.InternalName`."""
+        self._last_identifier += 1
+        rv = object.__new__(nodes.InternalName)
+        nodes.Node.__init__(rv, f"fi{self._last_identifier}", lineno=lineno)
+        return rv
+
+    def parse_statement(self) -> t.Union[nodes.Node, t.List[nodes.Node]]:
+        """Parse a single statement."""
+        token = self.stream.current
+        if token.type != "name":
+            self.fail("tag name expected", token.lineno)
+        self._tag_stack.append(token.value)
+        pop_tag = True
+        try:
+            if token.value in _statement_keywords:
+                f = getattr(self, f"parse_{self.stream.current.value}")
+                return f()  # type: ignore
+            if token.value == "call":
+                return self.parse_call_block()
+            if token.value == "filter":
+                return self.parse_filter_block()
+            ext = self.extensions.get(token.value)
+            if ext is not None:
+                return ext(self)
+
+            # did not work out, remove the token we pushed by accident
+            # from the stack so that the unknown tag fail function can
+            # produce a proper error message.
+            self._tag_stack.pop()
+            pop_tag = False
+            self.fail_unknown_tag(token.value, token.lineno)
+        finally:
+            if pop_tag:
+                self._tag_stack.pop()
+
+    def parse_statements(
+        self, end_tokens: t.Tuple[str, ...], drop_needle: bool = False
+    ) -> t.List[nodes.Node]:
+        """Parse multiple statements into a list until one of the end tokens
+        is reached.  This is used to parse the body of statements as it also
+        parses template data if appropriate.  The parser checks first if the
+        current token is a colon and skips it if there is one.  Then it checks
+        for the block end and parses until one of the `end_tokens` is
+        reached.  Per default the active token in the stream at the end of
+        the call is the matched end token.  If this is not wanted `drop_needle`
+        can be set to `True` and the end token is removed.
+        """
+        # the first token may be a colon for python compatibility
+        self.stream.skip_if("colon")
+
+        # in the future it would be possible to add whole code sections
+        # by adding some sort of end of statement token and parsing those here.
+        self.stream.expect("block_end")
+        result = self.subparse(end_tokens)
+
+        # we reached the end of the template too early, the subparser
+        # does not check for this, so we do that now
+        if self.stream.current.type == "eof":
+            self.fail_eof(end_tokens)
+
+        if drop_needle:
+            next(self.stream)
+        return result
+
+    def parse_set(self) -> t.Union[nodes.Assign, nodes.AssignBlock]:
+        """Parse an assign statement."""
+        lineno = next(self.stream).lineno
+        target = self.parse_assign_target(with_namespace=True)
+        if self.stream.skip_if("assign"):
+            expr = self.parse_tuple()
+            return nodes.Assign(target, expr, lineno=lineno)
+        filter_node = self.parse_filter(None)
+        body = self.parse_statements(("name:endset",), drop_needle=True)
+        return nodes.AssignBlock(target, filter_node, body, lineno=lineno)
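+    # Illustrative note (editor addition, not upstream Jinja source):
+    # ``{% set x = 1 %}`` becomes an Assign node, while the block form
+    # ``{% set x | upper %}...{% endset %}`` becomes an AssignBlock whose
+    # body is rendered and piped through the optional filter.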
+
+    def parse_for(self) -> nodes.For:
+        """Parse a for loop."""
+        lineno = self.stream.expect("name:for").lineno
+        target = self.parse_assign_target(extra_end_rules=("name:in",))
+        self.stream.expect("name:in")
+        iter = self.parse_tuple(
+            with_condexpr=False, extra_end_rules=("name:recursive",)
+        )
+        test = None
+        if self.stream.skip_if("name:if"):
+            test = self.parse_expression()
+        recursive = self.stream.skip_if("name:recursive")
+        body = self.parse_statements(("name:endfor", "name:else"))
+        if next(self.stream).value == "endfor":
+            else_ = []
+        else:
+            else_ = self.parse_statements(("name:endfor",), drop_needle=True)
+        return nodes.For(target, iter, body, else_, test, recursive, lineno=lineno)
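+    # Illustrative note (editor addition, not upstream Jinja source): for
+    #   {% for k, v in items if v recursive %}...{% else %}...{% endfor %}
+    # ``target`` is a Tuple, ``iter`` is Name("items"), ``test`` holds the
+    # filter expression and ``recursive`` is True.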
+
+    def parse_if(self) -> nodes.If:
+        """Parse an if construct."""
+        node = result = nodes.If(lineno=self.stream.expect("name:if").lineno)
+        while True:
+            node.test = self.parse_tuple(with_condexpr=False)
+            node.body = self.parse_statements(("name:elif", "name:else", "name:endif"))
+            node.elif_ = []
+            node.else_ = []
+            token = next(self.stream)
+            if token.test("name:elif"):
+                node = nodes.If(lineno=self.stream.current.lineno)
+                result.elif_.append(node)
+                continue
+            elif token.test("name:else"):
+                result.else_ = self.parse_statements(("name:endif",), drop_needle=True)
+            break
+        return result
+
+    def parse_with(self) -> nodes.With:
+        node = nodes.With(lineno=next(self.stream).lineno)
+        targets: t.List[nodes.Expr] = []
+        values: t.List[nodes.Expr] = []
+        while self.stream.current.type != "block_end":
+            if targets:
+                self.stream.expect("comma")
+            target = self.parse_assign_target()
+            target.set_ctx("param")
+            targets.append(target)
+            self.stream.expect("assign")
+            values.append(self.parse_expression())
+        node.targets = targets
+        node.values = values
+        node.body = self.parse_statements(("name:endwith",), drop_needle=True)
+        return node
+
+    def parse_autoescape(self) -> nodes.Scope:
+        node = nodes.ScopedEvalContextModifier(lineno=next(self.stream).lineno)
+        node.options = [nodes.Keyword("autoescape", self.parse_expression())]
+        node.body = self.parse_statements(("name:endautoescape",), drop_needle=True)
+        return nodes.Scope([node])
+
+    def parse_block(self) -> nodes.Block:
+        node = nodes.Block(lineno=next(self.stream).lineno)
+        node.name = self.stream.expect("name").value
+        node.scoped = self.stream.skip_if("name:scoped")
+        node.required = self.stream.skip_if("name:required")
+
+        # common problem people encounter when switching from django
+        # to jinja.  we do not support hyphens in block names, so let's
+        # raise a nicer error message in that case.
+        if self.stream.current.type == "sub":
+            self.fail(
+                "Block names in Jinja have to be valid Python identifiers and may not"
+                " contain hyphens, use an underscore instead."
+            )
+
+        node.body = self.parse_statements(("name:endblock",), drop_needle=True)
+
+        # enforce that required blocks only contain whitespace or comments
+        # by asserting that the body, if not empty, is just TemplateData nodes
+        # with whitespace data
+        if node.required and not all(
+            isinstance(child, nodes.TemplateData) and child.data.isspace()
+            for body in node.body
+            for child in body.nodes  # type: ignore
+        ):
+            self.fail("Required blocks can only contain comments or whitespace")
+
+        self.stream.skip_if("name:" + node.name)
+        return node
+
+    def parse_extends(self) -> nodes.Extends:
+        node = nodes.Extends(lineno=next(self.stream).lineno)
+        node.template = self.parse_expression()
+        return node
+
+    def parse_import_context(
+        self, node: _ImportInclude, default: bool
+    ) -> _ImportInclude:
+        if self.stream.current.test_any(
+            "name:with", "name:without"
+        ) and self.stream.look().test("name:context"):
+            node.with_context = next(self.stream).value == "with"
+            self.stream.skip()
+        else:
+            node.with_context = default
+        return node
+
+    def parse_include(self) -> nodes.Include:
+        node = nodes.Include(lineno=next(self.stream).lineno)
+        node.template = self.parse_expression()
+        if self.stream.current.test("name:ignore") and self.stream.look().test(
+            "name:missing"
+        ):
+            node.ignore_missing = True
+            self.stream.skip(2)
+        else:
+            node.ignore_missing = False
+        return self.parse_import_context(node, True)
+
+    def parse_import(self) -> nodes.Import:
+        node = nodes.Import(lineno=next(self.stream).lineno)
+        node.template = self.parse_expression()
+        self.stream.expect("name:as")
+        node.target = self.parse_assign_target(name_only=True).name
+        return self.parse_import_context(node, False)
+
+    def parse_from(self) -> nodes.FromImport:
+        node = nodes.FromImport(lineno=next(self.stream).lineno)
+        node.template = self.parse_expression()
+        self.stream.expect("name:import")
+        node.names = []
+
+        def parse_context() -> bool:
+            if self.stream.current.value in {
+                "with",
+                "without",
+            } and self.stream.look().test("name:context"):
+                node.with_context = next(self.stream).value == "with"
+                self.stream.skip()
+                return True
+            return False
+
+        while True:
+            if node.names:
+                self.stream.expect("comma")
+            if self.stream.current.type == "name":
+                if parse_context():
+                    break
+                target = self.parse_assign_target(name_only=True)
+                if target.name.startswith("_"):
+                    self.fail(
+                        "names starting with an underline can not be imported",
+                        target.lineno,
+                        exc=TemplateAssertionError,
+                    )
+                if self.stream.skip_if("name:as"):
+                    alias = self.parse_assign_target(name_only=True)
+                    node.names.append((target.name, alias.name))
+                else:
+                    node.names.append(target.name)
+                if parse_context() or self.stream.current.type != "comma":
+                    break
+            else:
+                self.stream.expect("name")
+        if not hasattr(node, "with_context"):
+            node.with_context = False
+        return node
+
+    def parse_signature(self, node: _MacroCall) -> None:
+        args = node.args = []
+        defaults = node.defaults = []
+        self.stream.expect("lparen")
+        while self.stream.current.type != "rparen":
+            if args:
+                self.stream.expect("comma")
+            arg = self.parse_assign_target(name_only=True)
+            arg.set_ctx("param")
+            if self.stream.skip_if("assign"):
+                defaults.append(self.parse_expression())
+            elif defaults:
+                self.fail("non-default argument follows default argument")
+            args.append(arg)
+        self.stream.expect("rparen")
+
+    def parse_call_block(self) -> nodes.CallBlock:
+        node = nodes.CallBlock(lineno=next(self.stream).lineno)
+        if self.stream.current.type == "lparen":
+            self.parse_signature(node)
+        else:
+            node.args = []
+            node.defaults = []
+
+        call_node = self.parse_expression()
+        if not isinstance(call_node, nodes.Call):
+            self.fail("expected call", node.lineno)
+        node.call = call_node
+        node.body = self.parse_statements(("name:endcall",), drop_needle=True)
+        return node
+
+    def parse_filter_block(self) -> nodes.FilterBlock:
+        node = nodes.FilterBlock(lineno=next(self.stream).lineno)
+        node.filter = self.parse_filter(None, start_inline=True)  # type: ignore
+        node.body = self.parse_statements(("name:endfilter",), drop_needle=True)
+        return node
+
+    def parse_macro(self) -> nodes.Macro:
+        node = nodes.Macro(lineno=next(self.stream).lineno)
+        node.name = self.parse_assign_target(name_only=True).name
+        self.parse_signature(node)
+        node.body = self.parse_statements(("name:endmacro",), drop_needle=True)
+        return node
+
+    def parse_print(self) -> nodes.Output:
+        node = nodes.Output(lineno=next(self.stream).lineno)
+        node.nodes = []
+        while self.stream.current.type != "block_end":
+            if node.nodes:
+                self.stream.expect("comma")
+            node.nodes.append(self.parse_expression())
+        return node
+
+    @typing.overload
+    def parse_assign_target(
+        self, with_tuple: bool = ..., name_only: "te.Literal[True]" = ...
+    ) -> nodes.Name:
+        ...
+
+    @typing.overload
+    def parse_assign_target(
+        self,
+        with_tuple: bool = True,
+        name_only: bool = False,
+        extra_end_rules: t.Optional[t.Tuple[str, ...]] = None,
+        with_namespace: bool = False,
+    ) -> t.Union[nodes.NSRef, nodes.Name, nodes.Tuple]:
+        ...
+
+    def parse_assign_target(
+        self,
+        with_tuple: bool = True,
+        name_only: bool = False,
+        extra_end_rules: t.Optional[t.Tuple[str, ...]] = None,
+        with_namespace: bool = False,
+    ) -> t.Union[nodes.NSRef, nodes.Name, nodes.Tuple]:
+        """Parse an assignment target.  As Jinja allows assignments to
+        tuples, this function can parse all allowed assignment targets.  Per
+        default assignments to tuples are parsed; this can be disabled, however,
+        by setting `with_tuple` to `False`.  If only assignments to names are
+        wanted `name_only` can be set to `True`.  The `extra_end_rules`
+        parameter is forwarded to the tuple parsing function.  If
+        `with_namespace` is enabled, a namespace assignment may be parsed.
+        """
+        target: nodes.Expr
+
+        if with_namespace and self.stream.look().type == "dot":
+            token = self.stream.expect("name")
+            next(self.stream)  # dot
+            attr = self.stream.expect("name")
+            target = nodes.NSRef(token.value, attr.value, lineno=token.lineno)
+        elif name_only:
+            token = self.stream.expect("name")
+            target = nodes.Name(token.value, "store", lineno=token.lineno)
+        else:
+            if with_tuple:
+                target = self.parse_tuple(
+                    simplified=True, extra_end_rules=extra_end_rules
+                )
+            else:
+                target = self.parse_primary()
+
+            target.set_ctx("store")
+
+        if not target.can_assign():
+            self.fail(
+                f"can't assign to {type(target).__name__.lower()!r}", target.lineno
+            )
+
+        return target  # type: ignore
+
+    def parse_expression(self, with_condexpr: bool = True) -> nodes.Expr:
+        """Parse an expression.  Per default all expressions are parsed, if
+        the optional `with_condexpr` parameter is set to `False` conditional
+        expressions are not parsed.
+        """
+        if with_condexpr:
+            return self.parse_condexpr()
+        return self.parse_or()
+
+    def parse_condexpr(self) -> nodes.Expr:
+        lineno = self.stream.current.lineno
+        expr1 = self.parse_or()
+        expr3: t.Optional[nodes.Expr]
+
+        while self.stream.skip_if("name:if"):
+            expr2 = self.parse_or()
+            if self.stream.skip_if("name:else"):
+                expr3 = self.parse_condexpr()
+            else:
+                expr3 = None
+            expr1 = nodes.CondExpr(expr2, expr1, expr3, lineno=lineno)
+            lineno = self.stream.current.lineno
+        return expr1
+
+    def parse_or(self) -> nodes.Expr:
+        lineno = self.stream.current.lineno
+        left = self.parse_and()
+        while self.stream.skip_if("name:or"):
+            right = self.parse_and()
+            left = nodes.Or(left, right, lineno=lineno)
+            lineno = self.stream.current.lineno
+        return left
+
+    def parse_and(self) -> nodes.Expr:
+        lineno = self.stream.current.lineno
+        left = self.parse_not()
+        while self.stream.skip_if("name:and"):
+            right = self.parse_not()
+            left = nodes.And(left, right, lineno=lineno)
+            lineno = self.stream.current.lineno
+        return left
+
+    def parse_not(self) -> nodes.Expr:
+        if self.stream.current.test("name:not"):
+            lineno = next(self.stream).lineno
+            return nodes.Not(self.parse_not(), lineno=lineno)
+        return self.parse_compare()
+
+    def parse_compare(self) -> nodes.Expr:
+        lineno = self.stream.current.lineno
+        expr = self.parse_math1()
+        ops = []
+        while True:
+            token_type = self.stream.current.type
+            if token_type in _compare_operators:
+                next(self.stream)
+                ops.append(nodes.Operand(token_type, self.parse_math1()))
+            elif self.stream.skip_if("name:in"):
+                ops.append(nodes.Operand("in", self.parse_math1()))
+            elif self.stream.current.test("name:not") and self.stream.look().test(
+                "name:in"
+            ):
+                self.stream.skip(2)
+                ops.append(nodes.Operand("notin", self.parse_math1()))
+            else:
+                break
+            lineno = self.stream.current.lineno
+        if not ops:
+            return expr
+        return nodes.Compare(expr, ops, lineno=lineno)
+
+    def parse_math1(self) -> nodes.Expr:
+        lineno = self.stream.current.lineno
+        left = self.parse_concat()
+        while self.stream.current.type in ("add", "sub"):
+            cls = _math_nodes[self.stream.current.type]
+            next(self.stream)
+            right = self.parse_concat()
+            left = cls(left, right, lineno=lineno)
+            lineno = self.stream.current.lineno
+        return left
+
+    def parse_concat(self) -> nodes.Expr:
+        lineno = self.stream.current.lineno
+        args = [self.parse_math2()]
+        while self.stream.current.type == "tilde":
+            next(self.stream)
+            args.append(self.parse_math2())
+        if len(args) == 1:
+            return args[0]
+        return nodes.Concat(args, lineno=lineno)
+
+    def parse_math2(self) -> nodes.Expr:
+        lineno = self.stream.current.lineno
+        left = self.parse_pow()
+        while self.stream.current.type in ("mul", "div", "floordiv", "mod"):
+            cls = _math_nodes[self.stream.current.type]
+            next(self.stream)
+            right = self.parse_pow()
+            left = cls(left, right, lineno=lineno)
+            lineno = self.stream.current.lineno
+        return left
+
+    def parse_pow(self) -> nodes.Expr:
+        lineno = self.stream.current.lineno
+        left = self.parse_unary()
+        while self.stream.current.type == "pow":
+            next(self.stream)
+            right = self.parse_unary()
+            left = nodes.Pow(left, right, lineno=lineno)
+            lineno = self.stream.current.lineno
+        return left
+
+    def parse_unary(self, with_filter: bool = True) -> nodes.Expr:
+        token_type = self.stream.current.type
+        lineno = self.stream.current.lineno
+        node: nodes.Expr
+
+        if token_type == "sub":
+            next(self.stream)
+            node = nodes.Neg(self.parse_unary(False), lineno=lineno)
+        elif token_type == "add":
+            next(self.stream)
+            node = nodes.Pos(self.parse_unary(False), lineno=lineno)
+        else:
+            node = self.parse_primary()
+        node = self.parse_postfix(node)
+        if with_filter:
+            node = self.parse_filter_expr(node)
+        return node
+
+    def parse_primary(self) -> nodes.Expr:
+        token = self.stream.current
+        node: nodes.Expr
+        if token.type == "name":
+            if token.value in ("true", "false", "True", "False"):
+                node = nodes.Const(token.value in ("true", "True"), lineno=token.lineno)
+            elif token.value in ("none", "None"):
+                node = nodes.Const(None, lineno=token.lineno)
+            else:
+                node = nodes.Name(token.value, "load", lineno=token.lineno)
+            next(self.stream)
+        elif token.type == "string":
+            next(self.stream)
+            buf = [token.value]
+            lineno = token.lineno
+            while self.stream.current.type == "string":
+                buf.append(self.stream.current.value)
+                next(self.stream)
+            node = nodes.Const("".join(buf), lineno=lineno)
+        elif token.type in ("integer", "float"):
+            next(self.stream)
+            node = nodes.Const(token.value, lineno=token.lineno)
+        elif token.type == "lparen":
+            next(self.stream)
+            node = self.parse_tuple(explicit_parentheses=True)
+            self.stream.expect("rparen")
+        elif token.type == "lbracket":
+            node = self.parse_list()
+        elif token.type == "lbrace":
+            node = self.parse_dict()
+        else:
+            self.fail(f"unexpected {describe_token(token)!r}", token.lineno)
+        return node
+
+    def parse_tuple(
+        self,
+        simplified: bool = False,
+        with_condexpr: bool = True,
+        extra_end_rules: t.Optional[t.Tuple[str, ...]] = None,
+        explicit_parentheses: bool = False,
+    ) -> t.Union[nodes.Tuple, nodes.Expr]:
+        """Works like `parse_expression` but if multiple expressions are
+        delimited by a comma a :class:`~jinja2.nodes.Tuple` node is created.
+        This method could also return a regular expression instead of a tuple
+        if no commas where found.
+
+        The default parsing mode is a full tuple.  If `simplified` is `True`
+        only names and literals are parsed.  The `with_condexpr` parameter
+        is forwarded to :meth:`parse_expression`.
+
+        Because tuples do not require delimiters and may end in a bogus
+        comma, an extra hint is needed that marks the end of a tuple.  For
+        example, for loops support tuples between `for` and `in`; in that
+        case `extra_end_rules` is set to ``['name:in']``.
+
+        `explicit_parentheses` is true if the parsing was triggered by an
+        expression in parentheses.  This is used to figure out if an empty
+        tuple is a valid expression or not.
+        """
+        lineno = self.stream.current.lineno
+        if simplified:
+            parse = self.parse_primary
+        elif with_condexpr:
+            parse = self.parse_expression
+        else:
+
+            def parse() -> nodes.Expr:
+                return self.parse_expression(with_condexpr=False)
+
+        args: t.List[nodes.Expr] = []
+        is_tuple = False
+
+        while True:
+            if args:
+                self.stream.expect("comma")
+            if self.is_tuple_end(extra_end_rules):
+                break
+            args.append(parse())
+            if self.stream.current.type == "comma":
+                is_tuple = True
+            else:
+                break
+            lineno = self.stream.current.lineno
+
+        if not is_tuple:
+            if args:
+                return args[0]
+
+            # if we don't have explicit parentheses, an empty tuple is
+            # not a valid expression.  This would mean nothing (literally
+            # nothing) in the spot of an expression would be an empty
+            # tuple.
+            if not explicit_parentheses:
+                self.fail(
+                    "Expected an expression,"
+                    f" got {describe_token(self.stream.current)!r}"
+                )
+
+        return nodes.Tuple(args, "load", lineno=lineno)
+
+    def parse_list(self) -> nodes.List:
+        token = self.stream.expect("lbracket")
+        items: t.List[nodes.Expr] = []
+        while self.stream.current.type != "rbracket":
+            if items:
+                self.stream.expect("comma")
+            if self.stream.current.type == "rbracket":
+                break
+            items.append(self.parse_expression())
+        self.stream.expect("rbracket")
+        return nodes.List(items, lineno=token.lineno)
+
+    def parse_dict(self) -> nodes.Dict:
+        token = self.stream.expect("lbrace")
+        items: t.List[nodes.Pair] = []
+        while self.stream.current.type != "rbrace":
+            if items:
+                self.stream.expect("comma")
+            if self.stream.current.type == "rbrace":
+                break
+            key = self.parse_expression()
+            self.stream.expect("colon")
+            value = self.parse_expression()
+            items.append(nodes.Pair(key, value, lineno=key.lineno))
+        self.stream.expect("rbrace")
+        return nodes.Dict(items, lineno=token.lineno)
+
+    def parse_postfix(self, node: nodes.Expr) -> nodes.Expr:
+        while True:
+            token_type = self.stream.current.type
+            if token_type == "dot" or token_type == "lbracket":
+                node = self.parse_subscript(node)
+            # calls are valid both after postfix expressions (getattr
+            # and getitem) as well as filters and tests
+            elif token_type == "lparen":
+                node = self.parse_call(node)
+            else:
+                break
+        return node
+
+    def parse_filter_expr(self, node: nodes.Expr) -> nodes.Expr:
+        while True:
+            token_type = self.stream.current.type
+            if token_type == "pipe":
+                node = self.parse_filter(node)  # type: ignore
+            elif token_type == "name" and self.stream.current.value == "is":
+                node = self.parse_test(node)
+            # calls are valid both after postfix expressions (getattr
+            # and getitem) as well as filters and tests
+            elif token_type == "lparen":
+                node = self.parse_call(node)
+            else:
+                break
+        return node
+
+    def parse_subscript(
+        self, node: nodes.Expr
+    ) -> t.Union[nodes.Getattr, nodes.Getitem]:
+        token = next(self.stream)
+        arg: nodes.Expr
+
+        if token.type == "dot":
+            attr_token = self.stream.current
+            next(self.stream)
+            if attr_token.type == "name":
+                return nodes.Getattr(
+                    node, attr_token.value, "load", lineno=token.lineno
+                )
+            elif attr_token.type != "integer":
+                self.fail("expected name or number", attr_token.lineno)
+            arg = nodes.Const(attr_token.value, lineno=attr_token.lineno)
+            return nodes.Getitem(node, arg, "load", lineno=token.lineno)
+        if token.type == "lbracket":
+            args: t.List[nodes.Expr] = []
+            while self.stream.current.type != "rbracket":
+                if args:
+                    self.stream.expect("comma")
+                args.append(self.parse_subscribed())
+            self.stream.expect("rbracket")
+            if len(args) == 1:
+                arg = args[0]
+            else:
+                arg = nodes.Tuple(args, "load", lineno=token.lineno)
+            return nodes.Getitem(node, arg, "load", lineno=token.lineno)
+        self.fail("expected subscript expression", token.lineno)
+
+    def parse_subscribed(self) -> nodes.Expr:
+        lineno = self.stream.current.lineno
+        args: t.List[t.Optional[nodes.Expr]]
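+        # ``x[1:2:3]`` produces Slice(1, 2, 3); omitted pieces become
+        # None, e.g. ``x[::2]`` -> Slice(None, None, 2).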
+
+        if self.stream.current.type == "colon":
+            next(self.stream)
+            args = [None]
+        else:
+            node = self.parse_expression()
+            if self.stream.current.type != "colon":
+                return node
+            next(self.stream)
+            args = [node]
+
+        if self.stream.current.type == "colon":
+            args.append(None)
+        elif self.stream.current.type not in ("rbracket", "comma"):
+            args.append(self.parse_expression())
+        else:
+            args.append(None)
+
+        if self.stream.current.type == "colon":
+            next(self.stream)
+            if self.stream.current.type not in ("rbracket", "comma"):
+                args.append(self.parse_expression())
+            else:
+                args.append(None)
+        else:
+            args.append(None)
+
+        return nodes.Slice(lineno=lineno, *args)
+
+    def parse_call_args(self) -> t.Tuple:
+        token = self.stream.expect("lparen")
+        args = []
+        kwargs = []
+        dyn_args = None
+        dyn_kwargs = None
+        require_comma = False
+
+        def ensure(expr: bool) -> None:
+            if not expr:
+                self.fail("invalid syntax for function call expression", token.lineno)
+
+        while self.stream.current.type != "rparen":
+            if require_comma:
+                self.stream.expect("comma")
+
+                # support for trailing comma
+                if self.stream.current.type == "rparen":
+                    break
+
+            if self.stream.current.type == "mul":
+                ensure(dyn_args is None and dyn_kwargs is None)
+                next(self.stream)
+                dyn_args = self.parse_expression()
+            elif self.stream.current.type == "pow":
+                ensure(dyn_kwargs is None)
+                next(self.stream)
+                dyn_kwargs = self.parse_expression()
+            else:
+                if (
+                    self.stream.current.type == "name"
+                    and self.stream.look().type == "assign"
+                ):
+                    # Parsing a kwarg
+                    ensure(dyn_kwargs is None)
+                    key = self.stream.current.value
+                    self.stream.skip(2)
+                    value = self.parse_expression()
+                    kwargs.append(nodes.Keyword(key, value, lineno=value.lineno))
+                else:
+                    # Parsing an arg
+                    ensure(dyn_args is None and dyn_kwargs is None and not kwargs)
+                    args.append(self.parse_expression())
+
+            require_comma = True
+
+        self.stream.expect("rparen")
+        return args, kwargs, dyn_args, dyn_kwargs
+
+    def parse_call(self, node: nodes.Expr) -> nodes.Call:
+        # The lparen will be expected in parse_call_args, but the lineno
+        # needs to be recorded before the stream is advanced.
+        token = self.stream.current
+        args, kwargs, dyn_args, dyn_kwargs = self.parse_call_args()
+        return nodes.Call(node, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno)
+
+    def parse_filter(
+        self, node: t.Optional[nodes.Expr], start_inline: bool = False
+    ) -> t.Optional[nodes.Expr]:
+        while self.stream.current.type == "pipe" or start_inline:
+            if not start_inline:
+                next(self.stream)
+            token = self.stream.expect("name")
+            name = token.value
+            while self.stream.current.type == "dot":
+                next(self.stream)
+                name += "." + self.stream.expect("name").value
+            if self.stream.current.type == "lparen":
+                args, kwargs, dyn_args, dyn_kwargs = self.parse_call_args()
+            else:
+                args = []
+                kwargs = []
+                dyn_args = dyn_kwargs = None
+            node = nodes.Filter(
+                node, name, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno
+            )
+            start_inline = False
+        return node
+
+    def parse_test(self, node: nodes.Expr) -> nodes.Expr:
+        token = next(self.stream)
+        if self.stream.current.test("name:not"):
+            next(self.stream)
+            negated = True
+        else:
+            negated = False
+        name = self.stream.expect("name").value
+        while self.stream.current.type == "dot":
+            next(self.stream)
+            name += "." + self.stream.expect("name").value
+        dyn_args = dyn_kwargs = None
+        kwargs = []
+        if self.stream.current.type == "lparen":
+            args, kwargs, dyn_args, dyn_kwargs = self.parse_call_args()
+        elif self.stream.current.type in {
+            "name",
+            "string",
+            "integer",
+            "float",
+            "lparen",
+            "lbracket",
+            "lbrace",
+        } and not self.stream.current.test_any("name:else", "name:or", "name:and"):
+            if self.stream.current.test("name:is"):
+                self.fail("You cannot chain multiple tests with is")
+            arg_node = self.parse_primary()
+            arg_node = self.parse_postfix(arg_node)
+            args = [arg_node]
+        else:
+            args = []
+        node = nodes.Test(
+            node, name, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno
+        )
+        if negated:
+            node = nodes.Not(node, lineno=token.lineno)
+        return node
+
+    def subparse(
+        self, end_tokens: t.Optional[t.Tuple[str, ...]] = None
+    ) -> t.List[nodes.Node]:
+        body: t.List[nodes.Node] = []
+        data_buffer: t.List[nodes.Node] = []
+        add_data = data_buffer.append
+
+        if end_tokens is not None:
+            self._end_token_stack.append(end_tokens)
+
+        def flush_data() -> None:
+            if data_buffer:
+                lineno = data_buffer[0].lineno
+                body.append(nodes.Output(data_buffer[:], lineno=lineno))
+                del data_buffer[:]
+
+        try:
+            while self.stream:
+                token = self.stream.current
+                if token.type == "data":
+                    if token.value:
+                        add_data(nodes.TemplateData(token.value, lineno=token.lineno))
+                    next(self.stream)
+                elif token.type == "variable_begin":
+                    next(self.stream)
+                    add_data(self.parse_tuple(with_condexpr=True))
+                    self.stream.expect("variable_end")
+                elif token.type == "block_begin":
+                    flush_data()
+                    next(self.stream)
+                    if end_tokens is not None and self.stream.current.test_any(
+                        *end_tokens
+                    ):
+                        return body
+                    rv = self.parse_statement()
+                    if isinstance(rv, list):
+                        body.extend(rv)
+                    else:
+                        body.append(rv)
+                    self.stream.expect("block_end")
+                else:
+                    raise AssertionError("internal parsing error")
+
+            flush_data()
+        finally:
+            if end_tokens is not None:
+                self._end_token_stack.pop()
+        return body
+
+    def parse(self) -> nodes.Template:
+        """Parse the whole template into a `Template` node."""
+        result = nodes.Template(self.subparse(), lineno=1)
+        result.set_environment(self.environment)
+        return result
diff --git a/venv/lib/python3.9/site-packages/jinja2/py.typed b/venv/lib/python3.9/site-packages/jinja2/py.typed
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/py.typed
diff --git a/venv/lib/python3.9/site-packages/jinja2/runtime.py b/venv/lib/python3.9/site-packages/jinja2/runtime.py
new file mode 100644
index 0000000..985842b
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/runtime.py
@@ -0,0 +1,1053 @@
+"""The runtime functions and state used by compiled templates."""
+import functools
+import sys
+import typing as t
+from collections import abc
+from itertools import chain
+
+from markupsafe import escape  # noqa: F401
+from markupsafe import Markup
+from markupsafe import soft_str
+
+from .async_utils import auto_aiter
+from .async_utils import auto_await  # noqa: F401
+from .exceptions import TemplateNotFound  # noqa: F401
+from .exceptions import TemplateRuntimeError  # noqa: F401
+from .exceptions import UndefinedError
+from .nodes import EvalContext
+from .utils import _PassArg
+from .utils import concat
+from .utils import internalcode
+from .utils import missing
+from .utils import Namespace  # noqa: F401
+from .utils import object_type_repr
+from .utils import pass_eval_context
+
+V = t.TypeVar("V")
+F = t.TypeVar("F", bound=t.Callable[..., t.Any])
+
+if t.TYPE_CHECKING:
+    import logging
+    import typing_extensions as te
+    from .environment import Environment
+
+    class LoopRenderFunc(te.Protocol):
+        def __call__(
+            self,
+            reciter: t.Iterable[V],
+            loop_render_func: "LoopRenderFunc",
+            depth: int = 0,
+        ) -> str:
+            ...
+
+
+# these variables are exported to the template runtime
+exported = [
+    "LoopContext",
+    "TemplateReference",
+    "Macro",
+    "Markup",
+    "TemplateRuntimeError",
+    "missing",
+    "escape",
+    "markup_join",
+    "str_join",
+    "identity",
+    "TemplateNotFound",
+    "Namespace",
+    "Undefined",
+    "internalcode",
+]
+async_exported = [
+    "AsyncLoopContext",
+    "auto_aiter",
+    "auto_await",
+]
+
+
+def identity(x: V) -> V:
+    """Returns its argument. Useful for certain things in the
+    environment.
+    """
+    return x
+
+
+def markup_join(seq: t.Iterable[t.Any]) -> str:
+    """Concatenation that escapes if necessary and converts to string."""
+    buf = []
+    iterator = map(soft_str, seq)
+    for arg in iterator:
+        buf.append(arg)
+        if hasattr(arg, "__html__"):
+            return Markup("").join(chain(buf, iterator))
+    return concat(buf)
+
+
+def str_join(seq: t.Iterable[t.Any]) -> str:
+    """Simple args to string conversion and concatenation."""
+    return concat(map(str, seq))
+
+
+def new_context(
+    environment: "Environment",
+    template_name: t.Optional[str],
+    blocks: t.Dict[str, t.Callable[["Context"], t.Iterator[str]]],
+    vars: t.Optional[t.Dict[str, t.Any]] = None,
+    shared: bool = False,
+    globals: t.Optional[t.MutableMapping[str, t.Any]] = None,
+    locals: t.Optional[t.Mapping[str, t.Any]] = None,
+) -> "Context":
+    """Internal helper for context creation."""
+    if vars is None:
+        vars = {}
+    if shared:
+        parent = vars
+    else:
+        parent = dict(globals or (), **vars)
+    if locals:
+        # if the parent is shared a copy should be created because
+        # we don't want to modify the dict passed
+        if shared:
+            parent = dict(parent)
+        for key, value in locals.items():
+            if value is not missing:
+                parent[key] = value
+    return environment.context_class(
+        environment, parent, template_name, blocks, globals=globals
+    )
+
+
+class TemplateReference:
+    """The `self` in templates."""
+
+    def __init__(self, context: "Context") -> None:
+        self.__context = context
+
+    def __getitem__(self, name: str) -> t.Any:
+        blocks = self.__context.blocks[name]
+        return BlockReference(name, self.__context, blocks, 0)
+
+    def __repr__(self) -> str:
+        return f"<{type(self).__name__} {self.__context.name!r}>"
+
+
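+# Wrap a plain ``dict`` method so that, on a Context, it operates on the
+# merged mapping returned by ``Context.get_all()`` (parent + vars).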
+def _dict_method_all(dict_method: F) -> F:
+    @functools.wraps(dict_method)
+    def f_all(self: "Context") -> t.Any:
+        return dict_method(self.get_all())
+
+    return t.cast(F, f_all)
+
+
+@abc.Mapping.register
+class Context:
+    """The template context holds the variables of a template.  It stores the
+    values passed to the template and also the names the template exports.
+    Creating instances is neither supported nor useful; a context is
+    created automatically at various stages of template evaluation and
+    should not be created by hand.
+
+    The context is immutable.  Modifications on :attr:`parent` **must not**
+    happen and modifications on :attr:`vars` are allowed from generated
+    template code only.  Template filters and global functions marked as
+    :func:`pass_context` get the active context passed as first argument
+    and are allowed to access the context read-only.
+
+    The template context supports read-only dict operations (`get`,
+    `keys`, `values`, `items`, `__getitem__`, `__contains__`).
+    Additionally there is a :meth:`resolve`
+    method that doesn't fail with a `KeyError` but returns an
+    :class:`Undefined` object for missing variables.
+    """
+
+    def __init__(
+        self,
+        environment: "Environment",
+        parent: t.Dict[str, t.Any],
+        name: t.Optional[str],
+        blocks: t.Dict[str, t.Callable[["Context"], t.Iterator[str]]],
+        globals: t.Optional[t.MutableMapping[str, t.Any]] = None,
+    ):
+        self.parent = parent
+        self.vars: t.Dict[str, t.Any] = {}
+        self.environment: "Environment" = environment
+        self.eval_ctx = EvalContext(self.environment, name)
+        self.exported_vars: t.Set[str] = set()
+        self.name = name
+        self.globals_keys = set() if globals is None else set(globals)
+
+        # create the initial mapping of blocks.  Whenever template inheritance
+        # takes place the runtime will update this mapping with the new blocks
+        # from the template.
+        self.blocks = {k: [v] for k, v in blocks.items()}
+
+    def super(
+        self, name: str, current: t.Callable[["Context"], t.Iterator[str]]
+    ) -> t.Union["BlockReference", "Undefined"]:
+        """Render a parent block."""
+        try:
+            blocks = self.blocks[name]
+            index = blocks.index(current) + 1
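+            # probing raises IndexError (a LookupError) if no parent exists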
+            blocks[index]
+        except LookupError:
+            return self.environment.undefined(
+                f"there is no parent block called {name!r}.", name="super"
+            )
+        return BlockReference(name, self, blocks, index)
+
+    def get(self, key: str, default: t.Any = None) -> t.Any:
+        """Look up a variable by name, or return a default if the key is
+        not found.
+
+        :param key: The variable name to look up.
+        :param default: The value to return if the key is not found.
+        """
+        try:
+            return self[key]
+        except KeyError:
+            return default
+
+    def resolve(self, key: str) -> t.Union[t.Any, "Undefined"]:
+        """Look up a variable by name, or return an :class:`Undefined`
+        object if the key is not found.
+
+        If you need to add custom behavior, override
+        :meth:`resolve_or_missing`, not this method. The various lookup
+        functions use that method, not this one.
+
+        :param key: The variable name to look up.
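+
+        For example (illustrative)::
+
+            user = context.resolve("user")
+            if isinstance(user, Undefined):
+                ...  # "user" was set neither in vars nor in parent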
+        """
+        rv = self.resolve_or_missing(key)
+
+        if rv is missing:
+            return self.environment.undefined(name=key)
+
+        return rv
+
+    def resolve_or_missing(self, key: str) -> t.Any:
+        """Look up a variable by name, or return a ``missing`` sentinel
+        if the key is not found.
+
+        Override this method to add custom lookup behavior.
+        :meth:`resolve`, :meth:`get`, and :meth:`__getitem__` use this
+        method. Don't call this method directly.
+
+        :param key: The variable name to look up.
+        """
+        if key in self.vars:
+            return self.vars[key]
+
+        if key in self.parent:
+            return self.parent[key]
+
+        return missing
+
+    def get_exported(self) -> t.Dict[str, t.Any]:
+        """Get a new dict with the exported variables."""
+        return {k: self.vars[k] for k in self.exported_vars}
+
+    def get_all(self) -> t.Dict[str, t.Any]:
+        """Return the complete context as dict including the exported
+        variables.  For optimization reasons this might not return an
+        actual copy, so be careful when using it.
+        """
+        if not self.vars:
+            return self.parent
+        if not self.parent:
+            return self.vars
+        return dict(self.parent, **self.vars)
+
+    @internalcode
+    def call(
+        __self, __obj: t.Callable, *args: t.Any, **kwargs: t.Any  # noqa: B902
+    ) -> t.Union[t.Any, "Undefined"]:
+        """Call the callable with the arguments and keyword arguments
+        provided but inject the active context or environment as first
+        argument if the callable has :func:`pass_context` or
+        :func:`pass_environment`.
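+
+        For example (illustrative)::
+
+            @pass_context
+            def greet(ctx, name):
+                return f"{ctx.name}: hello {name}"
+
+            context.call(greet, "world")  # called as greet(context, "world")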
+        """
+        if __debug__:
+            __traceback_hide__ = True  # noqa
+
+        # Allow callable classes to take a context
+        if (
+            hasattr(__obj, "__call__")  # noqa: B004
+            and _PassArg.from_obj(__obj.__call__) is not None  # type: ignore
+        ):
+            __obj = __obj.__call__  # type: ignore
+
+        pass_arg = _PassArg.from_obj(__obj)
+
+        if pass_arg is _PassArg.context:
+            # the active context should have access to variables set in
+            # loops and blocks without mutating the context itself
+            if kwargs.get("_loop_vars"):
+                __self = __self.derived(kwargs["_loop_vars"])
+            if kwargs.get("_block_vars"):
+                __self = __self.derived(kwargs["_block_vars"])
+            args = (__self,) + args
+        elif pass_arg is _PassArg.eval_context:
+            args = (__self.eval_ctx,) + args
+        elif pass_arg is _PassArg.environment:
+            args = (__self.environment,) + args
+
+        kwargs.pop("_block_vars", None)
+        kwargs.pop("_loop_vars", None)
+
+        try:
+            return __obj(*args, **kwargs)
+        except StopIteration:
+            return __self.environment.undefined(
+                "value was undefined because a callable raised a"
+                " StopIteration exception"
+            )
+
+    def derived(self, locals: t.Optional[t.Dict[str, t.Any]] = None) -> "Context":
+        """Internal helper function to create a derived context.  This is
+        used in situations where the system needs a new, independent
+        context within the same template.
+        """
+        context = new_context(
+            self.environment, self.name, {}, self.get_all(), True, None, locals
+        )
+        context.eval_ctx = self.eval_ctx
+        context.blocks.update((k, list(v)) for k, v in self.blocks.items())
+        return context
+
+    keys = _dict_method_all(dict.keys)
+    values = _dict_method_all(dict.values)
+    items = _dict_method_all(dict.items)
+
+    def __contains__(self, name: str) -> bool:
+        return name in self.vars or name in self.parent
+
+    def __getitem__(self, key: str) -> t.Any:
+        """Look up a variable by name with ``[]`` syntax, or raise a
+        ``KeyError`` if the key is not found.
+        """
+        item = self.resolve_or_missing(key)
+
+        if item is missing:
+            raise KeyError(key)
+
+        return item
+
+    def __repr__(self) -> str:
+        return f"<{type(self).__name__} {self.get_all()!r} of {self.name!r}>"
+
+
+class BlockReference:
+    """One block on a template reference."""
+
+    def __init__(
+        self,
+        name: str,
+        context: "Context",
+        stack: t.List[t.Callable[["Context"], t.Iterator[str]]],
+        depth: int,
+    ) -> None:
+        self.name = name
+        self._context = context
+        self._stack = stack
+        self._depth = depth
+
+    @property
+    def super(self) -> t.Union["BlockReference", "Undefined"]:
+        """Super the block."""
+        if self._depth + 1 >= len(self._stack):
+            return self._context.environment.undefined(
+                f"there is no parent block called {self.name!r}.", name="super"
+            )
+        return BlockReference(self.name, self._context, self._stack, self._depth + 1)
+
+    @internalcode
+    async def _async_call(self) -> str:
+        rv = concat(
+            [x async for x in self._stack[self._depth](self._context)]  # type: ignore
+        )
+
+        if self._context.eval_ctx.autoescape:
+            return Markup(rv)
+
+        return rv
+
+    @internalcode
+    def __call__(self) -> str:
+        if self._context.environment.is_async:
+            return self._async_call()  # type: ignore
+
+        rv = concat(self._stack[self._depth](self._context))
+
+        if self._context.eval_ctx.autoescape:
+            return Markup(rv)
+
+        return rv
+
+
+class LoopContext:
+    """A wrapper iterable for dynamic ``for`` loops, with information
+    about the loop and iteration.
+    """
+
+    #: Current iteration of the loop, starting at 0.
+    index0 = -1
+
+    _length: t.Optional[int] = None
+    _after: t.Any = missing
+    _current: t.Any = missing
+    _before: t.Any = missing
+    _last_changed_value: t.Any = missing
+
+    def __init__(
+        self,
+        iterable: t.Iterable[V],
+        undefined: t.Type["Undefined"],
+        recurse: t.Optional["LoopRenderFunc"] = None,
+        depth0: int = 0,
+    ) -> None:
+        """
+        :param iterable: Iterable to wrap.
+        :param undefined: :class:`Undefined` class to use for next and
+            previous items.
+        :param recurse: The function to render the loop body when the
+            loop is marked recursive.
+        :param depth0: Incremented when looping recursively.
+        """
+        self._iterable = iterable
+        self._iterator = self._to_iterator(iterable)
+        self._undefined = undefined
+        self._recurse = recurse
+        #: How many levels deep a recursive loop currently is, starting at 0.
+        self.depth0 = depth0
+
+    @staticmethod
+    def _to_iterator(iterable: t.Iterable[V]) -> t.Iterator[V]:
+        return iter(iterable)
+
+    @property
+    def length(self) -> int:
+        """Length of the iterable.
+
+        If the iterable is a generator or otherwise does not have a
+        size, it is eagerly evaluated to get a size.
+        """
+        if self._length is not None:
+            return self._length
+
+        try:
+            self._length = len(self._iterable)  # type: ignore
+        except TypeError:
+            iterable = list(self._iterator)
+            self._iterator = self._to_iterator(iterable)
+            self._length = len(iterable) + self.index + (self._after is not missing)
+
+        return self._length
+
+    def __len__(self) -> int:
+        return self.length
+
+    @property
+    def depth(self) -> int:
+        """How many levels deep a recursive loop currently is, starting at 1."""
+        return self.depth0 + 1
+
+    @property
+    def index(self) -> int:
+        """Current iteration of the loop, starting at 1."""
+        return self.index0 + 1
+
+    @property
+    def revindex0(self) -> int:
+        """Number of iterations from the end of the loop, ending at 0.
+
+        Requires calculating :attr:`length`.
+        """
+        return self.length - self.index
+
+    @property
+    def revindex(self) -> int:
+        """Number of iterations from the end of the loop, ending at 1.
+
+        Requires calculating :attr:`length`.
+        """
+        return self.length - self.index0
+
+    @property
+    def first(self) -> bool:
+        """Whether this is the first iteration of the loop."""
+        return self.index0 == 0
+
+    def _peek_next(self) -> t.Any:
+        """Return the next element in the iterable, or :data:`missing`
+        if the iterable is exhausted. Only peeks one item ahead, caching
+        the result in :attr:`_after` for use in subsequent checks. The
+        cache is reset when :meth:`__next__` is called.
+        """
+        if self._after is not missing:
+            return self._after
+
+        self._after = next(self._iterator, missing)
+        return self._after
+
+    @property
+    def last(self) -> bool:
+        """Whether this is the last iteration of the loop.
+
+        Causes the iterable to advance early. See
+        :func:`itertools.groupby` for issues this can cause.
+        The :func:`jinja-filters.groupby` filter avoids that issue.
+        """
+        return self._peek_next() is missing
+
+    @property
+    def previtem(self) -> t.Union[t.Any, "Undefined"]:
+        """The item in the previous iteration. Undefined during the
+        first iteration.
+        """
+        if self.first:
+            return self._undefined("there is no previous item")
+
+        return self._before
+
+    @property
+    def nextitem(self) -> t.Union[t.Any, "Undefined"]:
+        """The item in the next iteration. Undefined during the last
+        iteration.
+
+        Causes the iterable to advance early. See
+        :func:`itertools.groupby` for issues this can cause.
+        The :func:`jinja-filters.groupby` filter avoids that issue.
+        """
+        rv = self._peek_next()
+
+        if rv is missing:
+            return self._undefined("there is no next item")
+
+        return rv
+
+    def cycle(self, *args: V) -> V:
+        """Return a value from the given args, cycling through based on
+        the current :attr:`index0`.
+
+        :param args: One or more values to cycle through.
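+
+        For example, to alternate CSS classes in a template::
+
+            {% for row in rows %}
+                <li class="{{ loop.cycle('odd', 'even') }}">{{ row }}</li>
+            {% endfor %}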
+        """
+        if not args:
+            raise TypeError("no items for cycling given")
+
+        return args[self.index0 % len(args)]
+
+    def changed(self, *value: t.Any) -> bool:
+        """Return ``True`` if previously called with a different value
+        (including when called for the first time).
+
+        :param value: One or more values to compare to the last call.
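+
+        For example, to emit a heading only when a grouping key changes::
+
+            {% if loop.changed(entry.category) %}
+                <h2>{{ entry.category }}</h2>
+            {% endif %}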
+        """
+        if self._last_changed_value != value:
+            self._last_changed_value = value
+            return True
+
+        return False
+
+    def __iter__(self) -> "LoopContext":
+        return self
+
+    def __next__(self) -> t.Tuple[t.Any, "LoopContext"]:
+        if self._after is not missing:
+            rv = self._after
+            self._after = missing
+        else:
+            rv = next(self._iterator)
+
+        self.index0 += 1
+        self._before = self._current
+        self._current = rv
+        return rv, self
+
+    @internalcode
+    def __call__(self, iterable: t.Iterable[V]) -> str:
+        """When iterating over nested data, render the body of the loop
+        recursively with the given inner iterable data.
+
+        The loop must have the ``recursive`` marker for this to work.
+        """
+        if self._recurse is None:
+            raise TypeError(
+                "The loop must have the 'recursive' marker to be called recursively."
+            )
+
+        return self._recurse(iterable, self._recurse, depth=self.depth)
+
+    def __repr__(self) -> str:
+        return f"<{type(self).__name__} {self.index}/{self.length}>"
+
+
+class AsyncLoopContext(LoopContext):
+    _iterator: t.AsyncIterator[t.Any]  # type: ignore
+
+    @staticmethod
+    def _to_iterator(  # type: ignore
+        iterable: t.Union[t.Iterable[V], t.AsyncIterable[V]]
+    ) -> t.AsyncIterator[V]:
+        return auto_aiter(iterable)
+
+    @property
+    async def length(self) -> int:  # type: ignore
+        if self._length is not None:
+            return self._length
+
+        try:
+            self._length = len(self._iterable)  # type: ignore
+        except TypeError:
+            iterable = [x async for x in self._iterator]
+            self._iterator = self._to_iterator(iterable)
+            self._length = len(iterable) + self.index + (self._after is not missing)
+
+        return self._length
+
+    @property
+    async def revindex0(self) -> int:  # type: ignore
+        return await self.length - self.index
+
+    @property
+    async def revindex(self) -> int:  # type: ignore
+        return await self.length - self.index0
+
+    async def _peek_next(self) -> t.Any:
+        if self._after is not missing:
+            return self._after
+
+        try:
+            self._after = await self._iterator.__anext__()
+        except StopAsyncIteration:
+            self._after = missing
+
+        return self._after
+
+    @property
+    async def last(self) -> bool:  # type: ignore
+        return await self._peek_next() is missing
+
+    @property
+    async def nextitem(self) -> t.Union[t.Any, "Undefined"]:
+        rv = await self._peek_next()
+
+        if rv is missing:
+            return self._undefined("there is no next item")
+
+        return rv
+
+    def __aiter__(self) -> "AsyncLoopContext":
+        return self
+
+    async def __anext__(self) -> t.Tuple[t.Any, "AsyncLoopContext"]:
+        if self._after is not missing:
+            rv = self._after
+            self._after = missing
+        else:
+            rv = await self._iterator.__anext__()
+
+        self.index0 += 1
+        self._before = self._current
+        self._current = rv
+        return rv, self
+
+
+class Macro:
+    """Wraps a macro function."""
+
+    def __init__(
+        self,
+        environment: "Environment",
+        func: t.Callable[..., str],
+        name: str,
+        arguments: t.List[str],
+        catch_kwargs: bool,
+        catch_varargs: bool,
+        caller: bool,
+        default_autoescape: t.Optional[bool] = None,
+    ):
+        self._environment = environment
+        self._func = func
+        self._argument_count = len(arguments)
+        self.name = name
+        self.arguments = arguments
+        self.catch_kwargs = catch_kwargs
+        self.catch_varargs = catch_varargs
+        self.caller = caller
+        self.explicit_caller = "caller" in arguments
+
+        if default_autoescape is None:
+            if callable(environment.autoescape):
+                default_autoescape = environment.autoescape(None)
+            else:
+                default_autoescape = environment.autoescape
+
+        self._default_autoescape = default_autoescape
+
+    @internalcode
+    @pass_eval_context
+    def __call__(self, *args: t.Any, **kwargs: t.Any) -> str:
+        # This requires a bit of explanation.  In the past we used to
+        # decide largely based on compile-time information if a macro is
+        # safe or unsafe.  While there was a volatile mode it was largely
+        # unused for deciding on escaping.  This turns out to be
+        # problematic for macros because whether a macro is safe depends not
+        # on the escape mode when it was defined, but rather when it was used.
+        #
+        # Because however we export macros from the module system and
+        # there are historic callers that do not pass an eval context (and
+        # will continue to not pass one), we need to perform an instance
+        # check here.
+        #
+        # This is considered safe because an eval context is not a valid
+        # argument to callables otherwise anyway.  Worst case here is
+        # that if no eval context is passed we fall back to the compile
+        # time autoescape flag.
+        if args and isinstance(args[0], EvalContext):
+            autoescape = args[0].autoescape
+            args = args[1:]
+        else:
+            autoescape = self._default_autoescape
+
+        # try to consume the positional arguments
+        arguments = list(args[: self._argument_count])
+        off = len(arguments)
+
+        # For information why this is necessary refer to the handling
+        # of caller in the `macro_body` handler in the compiler.
+        found_caller = False
+
+        # if the number of arguments consumed is not the number of
+        # arguments expected we start filling in keyword arguments
+        # and defaults.
+        if off != self._argument_count:
+            for name in self.arguments[len(arguments) :]:
+                try:
+                    value = kwargs.pop(name)
+                except KeyError:
+                    value = missing
+                if name == "caller":
+                    found_caller = True
+                arguments.append(value)
+        else:
+            found_caller = self.explicit_caller
+
+        # It's important that the order of these arguments does not change
+        # unless it is also changed in the compiler's `function_scoping`
+        # method.  The order is: caller, keyword arguments, positional
+        # arguments!
+        if self.caller and not found_caller:
+            caller = kwargs.pop("caller", None)
+            if caller is None:
+                caller = self._environment.undefined("No caller defined", name="caller")
+            arguments.append(caller)
+
+        if self.catch_kwargs:
+            arguments.append(kwargs)
+        elif kwargs:
+            if "caller" in kwargs:
+                raise TypeError(
+                    f"macro {self.name!r} was invoked with two values for the special"
+                    " caller argument. This is most likely a bug."
+                )
+            raise TypeError(
+                f"macro {self.name!r} takes no keyword argument {next(iter(kwargs))!r}"
+            )
+        if self.catch_varargs:
+            arguments.append(args[self._argument_count :])
+        elif len(args) > self._argument_count:
+            raise TypeError(
+                f"macro {self.name!r} takes not more than"
+                f" {len(self.arguments)} argument(s)"
+            )
+
+        return self._invoke(arguments, autoescape)
+
+    async def _async_invoke(self, arguments: t.List[t.Any], autoescape: bool) -> str:
+        rv = await self._func(*arguments)  # type: ignore
+
+        if autoescape:
+            return Markup(rv)
+
+        return rv  # type: ignore
+
+    def _invoke(self, arguments: t.List[t.Any], autoescape: bool) -> str:
+        if self._environment.is_async:
+            return self._async_invoke(arguments, autoescape)  # type: ignore
+
+        rv = self._func(*arguments)
+
+        if autoescape:
+            rv = Markup(rv)
+
+        return rv
+
+    def __repr__(self) -> str:
+        name = "anonymous" if self.name is None else repr(self.name)
+        return f"<{type(self).__name__} {name}>"
+
+
+class Undefined:
+    """The default undefined type.  This undefined type can be printed and
+    iterated over, but every other access will raise an :exc:`UndefinedError`:
+
+    >>> foo = Undefined(name='foo')
+    >>> str(foo)
+    ''
+    >>> not foo
+    True
+    >>> foo + 42
+    Traceback (most recent call last):
+      ...
+    jinja2.exceptions.UndefinedError: 'foo' is undefined
+    """
+
+    __slots__ = (
+        "_undefined_hint",
+        "_undefined_obj",
+        "_undefined_name",
+        "_undefined_exception",
+    )
+
+    def __init__(
+        self,
+        hint: t.Optional[str] = None,
+        obj: t.Any = missing,
+        name: t.Optional[str] = None,
+        exc: t.Type[TemplateRuntimeError] = UndefinedError,
+    ) -> None:
+        self._undefined_hint = hint
+        self._undefined_obj = obj
+        self._undefined_name = name
+        self._undefined_exception = exc
+
+    @property
+    def _undefined_message(self) -> str:
+        """Build a message about the undefined value based on how it was
+        accessed.
+        """
+        if self._undefined_hint:
+            return self._undefined_hint
+
+        if self._undefined_obj is missing:
+            return f"{self._undefined_name!r} is undefined"
+
+        if not isinstance(self._undefined_name, str):
+            return (
+                f"{object_type_repr(self._undefined_obj)} has no"
+                f" element {self._undefined_name!r}"
+            )
+
+        return (
+            f"{object_type_repr(self._undefined_obj)!r} has no"
+            f" attribute {self._undefined_name!r}"
+        )
+
+    @internalcode
+    def _fail_with_undefined_error(
+        self, *args: t.Any, **kwargs: t.Any
+    ) -> "te.NoReturn":
+        """Raise an :exc:`UndefinedError` when operations are performed
+        on the undefined value.
+        """
+        raise self._undefined_exception(self._undefined_message)
+
+    @internalcode
+    def __getattr__(self, name: str) -> t.Any:
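+        # Let dunder lookups fail with a plain AttributeError so protocol
+        # probes (e.g. copy looking for ``__deepcopy__``) see a normal
+        # miss instead of an UndefinedError.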
+        if name[:2] == "__":
+            raise AttributeError(name)
+
+        return self._fail_with_undefined_error()
+
+    __add__ = __radd__ = __sub__ = __rsub__ = _fail_with_undefined_error
+    __mul__ = __rmul__ = __div__ = __rdiv__ = _fail_with_undefined_error
+    __truediv__ = __rtruediv__ = _fail_with_undefined_error
+    __floordiv__ = __rfloordiv__ = _fail_with_undefined_error
+    __mod__ = __rmod__ = _fail_with_undefined_error
+    __pos__ = __neg__ = _fail_with_undefined_error
+    __call__ = __getitem__ = _fail_with_undefined_error
+    __lt__ = __le__ = __gt__ = __ge__ = _fail_with_undefined_error
+    __int__ = __float__ = __complex__ = _fail_with_undefined_error
+    __pow__ = __rpow__ = _fail_with_undefined_error
+
+    def __eq__(self, other: t.Any) -> bool:
+        return type(self) is type(other)
+
+    def __ne__(self, other: t.Any) -> bool:
+        return not self.__eq__(other)
+
+    def __hash__(self) -> int:
+        return id(type(self))
+
+    def __str__(self) -> str:
+        return ""
+
+    def __len__(self) -> int:
+        return 0
+
+    def __iter__(self) -> t.Iterator[t.Any]:
+        yield from ()
+
+    async def __aiter__(self) -> t.AsyncIterator[t.Any]:
+        for _ in ():
+            yield
+
+    def __bool__(self) -> bool:
+        return False
+
+    def __repr__(self) -> str:
+        return "Undefined"
+
+
+def make_logging_undefined(
+    logger: t.Optional["logging.Logger"] = None, base: t.Type[Undefined] = Undefined
+) -> t.Type[Undefined]:
+    """Given a logger object this returns a new undefined class that will
+    log certain failures.  It will log iterations and printing.  If no
+    logger is given, a default logger is created.
+
+    Example::
+
+        logger = logging.getLogger(__name__)
+        LoggingUndefined = make_logging_undefined(
+            logger=logger,
+            base=Undefined
+        )
+
+    .. versionadded:: 2.8
+
+    :param logger: the logger to use.  If not provided, a default logger
+                   is created.
+    :param base: the base class to add logging functionality to.  This
+                 defaults to :class:`Undefined`.
+    """
+    if logger is None:
+        import logging
+
+        logger = logging.getLogger(__name__)
+        logger.addHandler(logging.StreamHandler(sys.stderr))
+
+    def _log_message(undef: Undefined) -> None:
+        logger.warning(  # type: ignore
+            "Template variable warning: %s", undef._undefined_message
+        )
+
+    class LoggingUndefined(base):  # type: ignore
+        __slots__ = ()
+
+        def _fail_with_undefined_error(  # type: ignore
+            self, *args: t.Any, **kwargs: t.Any
+        ) -> "te.NoReturn":
+            try:
+                super()._fail_with_undefined_error(*args, **kwargs)
+            except self._undefined_exception as e:
+                logger.error("Template variable error: %s", e)  # type: ignore
+                raise e
+
+        def __str__(self) -> str:
+            _log_message(self)
+            return super().__str__()  # type: ignore
+
+        def __iter__(self) -> t.Iterator[t.Any]:
+            _log_message(self)
+            return super().__iter__()  # type: ignore
+
+        def __bool__(self) -> bool:
+            _log_message(self)
+            return super().__bool__()  # type: ignore
+
+    return LoggingUndefined
+
+
+class ChainableUndefined(Undefined):
+    """An undefined that is chainable, where both ``__getattr__`` and
+    ``__getitem__`` return itself rather than raising an
+    :exc:`UndefinedError`.
+
+    >>> foo = ChainableUndefined(name='foo')
+    >>> str(foo.bar['baz'])
+    ''
+    >>> foo.bar['baz'] + 42
+    Traceback (most recent call last):
+      ...
+    jinja2.exceptions.UndefinedError: 'foo' is undefined
+
+    .. versionadded:: 2.11.0
+    """
+
+    __slots__ = ()
+
+    def __html__(self) -> str:
+        return str(self)
+
+    def __getattr__(self, _: str) -> "ChainableUndefined":
+        return self
+
+    __getitem__ = __getattr__  # type: ignore
+
+
+class DebugUndefined(Undefined):
+    """An undefined that returns the debug info when printed.
+
+    >>> foo = DebugUndefined(name='foo')
+    >>> str(foo)
+    '{{ foo }}'
+    >>> not foo
+    True
+    >>> foo + 42
+    Traceback (most recent call last):
+      ...
+    jinja2.exceptions.UndefinedError: 'foo' is undefined
+    """
+
+    __slots__ = ()
+
+    def __str__(self) -> str:
+        if self._undefined_hint:
+            message = f"undefined value printed: {self._undefined_hint}"
+
+        elif self._undefined_obj is missing:
+            message = self._undefined_name  # type: ignore
+
+        else:
+            message = (
+                f"no such element: {object_type_repr(self._undefined_obj)}"
+                f"[{self._undefined_name!r}]"
+            )
+
+        return f"{{{{ {message} }}}}"
+
+
+class StrictUndefined(Undefined):
+    """An undefined that barks on print and iteration as well as boolean
+    tests and all kinds of comparisons.  In other words: you can do nothing
+    with it except checking if it's defined using the `defined` test.
+
+    >>> foo = StrictUndefined(name='foo')
+    >>> str(foo)
+    Traceback (most recent call last):
+      ...
+    jinja2.exceptions.UndefinedError: 'foo' is undefined
+    >>> not foo
+    Traceback (most recent call last):
+      ...
+    jinja2.exceptions.UndefinedError: 'foo' is undefined
+    >>> foo + 42
+    Traceback (most recent call last):
+      ...
+    jinja2.exceptions.UndefinedError: 'foo' is undefined
+    """
+
+    __slots__ = ()
+    __iter__ = __str__ = __len__ = Undefined._fail_with_undefined_error
+    __eq__ = __ne__ = __bool__ = __hash__ = Undefined._fail_with_undefined_error
+    __contains__ = Undefined._fail_with_undefined_error
+
+
+# Remove slots attributes; after the metaclass is applied they are
+# unneeded and contain wrong data for subclasses.
+del (
+    Undefined.__slots__,
+    ChainableUndefined.__slots__,
+    DebugUndefined.__slots__,
+    StrictUndefined.__slots__,
+)
diff --git a/venv/lib/python3.9/site-packages/jinja2/sandbox.py b/venv/lib/python3.9/site-packages/jinja2/sandbox.py
new file mode 100644
index 0000000..06d7414
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/sandbox.py
@@ -0,0 +1,428 @@
+"""A sandbox layer that ensures unsafe operations cannot be performed.
+Useful when the template itself comes from an untrusted source.
+"""
+import operator
+import types
+import typing as t
+from _string import formatter_field_name_split  # type: ignore
+from collections import abc
+from collections import deque
+from string import Formatter
+
+from markupsafe import EscapeFormatter
+from markupsafe import Markup
+
+from .environment import Environment
+from .exceptions import SecurityError
+from .runtime import Context
+from .runtime import Undefined
+
+F = t.TypeVar("F", bound=t.Callable[..., t.Any])
+
+#: maximum number of items a range may produce
+MAX_RANGE = 100000
+
+#: Unsafe function attributes.
+UNSAFE_FUNCTION_ATTRIBUTES: t.Set[str] = set()
+
+#: Unsafe method attributes. Function attributes are unsafe for methods too.
+UNSAFE_METHOD_ATTRIBUTES: t.Set[str] = set()
+
+#: unsafe generator attributes.
+UNSAFE_GENERATOR_ATTRIBUTES = {"gi_frame", "gi_code"}
+
+#: unsafe attributes on coroutines
+UNSAFE_COROUTINE_ATTRIBUTES = {"cr_frame", "cr_code"}
+
+#: unsafe attributes on async generators
+UNSAFE_ASYNC_GENERATOR_ATTRIBUTES = {"ag_code", "ag_frame"}
+
+_mutable_spec: t.Tuple[t.Tuple[t.Type, t.FrozenSet[str]], ...] = (
+    (
+        abc.MutableSet,
+        frozenset(
+            [
+                "add",
+                "clear",
+                "difference_update",
+                "discard",
+                "pop",
+                "remove",
+                "symmetric_difference_update",
+                "update",
+            ]
+        ),
+    ),
+    (
+        abc.MutableMapping,
+        frozenset(["clear", "pop", "popitem", "setdefault", "update"]),
+    ),
+    (
+        abc.MutableSequence,
+        frozenset(["append", "reverse", "insert", "sort", "extend", "remove"]),
+    ),
+    (
+        deque,
+        frozenset(
+            [
+                "append",
+                "appendleft",
+                "clear",
+                "extend",
+                "extendleft",
+                "pop",
+                "popleft",
+                "remove",
+                "rotate",
+            ]
+        ),
+    ),
+)
+
+
+def inspect_format_method(callable: t.Callable) -> t.Optional[str]:
+    if not isinstance(
+        callable, (types.MethodType, types.BuiltinMethodType)
+    ) or callable.__name__ not in ("format", "format_map"):
+        return None
+
+    obj = callable.__self__
+
+    if isinstance(obj, str):
+        return obj
+
+    return None
+
+
+def safe_range(*args: int) -> range:
+    """A range that can't generate ranges with a length of more than
+    MAX_RANGE items.
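+
+    For example (with the default ``MAX_RANGE`` of 100000):
+
+    >>> len(safe_range(5))
+    5
+    >>> safe_range(MAX_RANGE + 1)
+    Traceback (most recent call last):
+      ...
+    OverflowError: Range too big. The sandbox blocks ranges larger than MAX_RANGE (100000).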
+    """
+    rng = range(*args)
+
+    if len(rng) > MAX_RANGE:
+        raise OverflowError(
+            "Range too big. The sandbox blocks ranges larger than"
+            f" MAX_RANGE ({MAX_RANGE})."
+        )
+
+    return rng
+
+
+def unsafe(f: F) -> F:
+    """Marks a function or method as unsafe.
+
+    .. code-block:: python
+
+        @unsafe
+        def delete(self):
+            pass
+    """
+    f.unsafe_callable = True  # type: ignore
+    return f
+
+
+def is_internal_attribute(obj: t.Any, attr: str) -> bool:
+    """Test if the attribute given is an internal python attribute.  For
+    example this function returns `True` for the `func_code` attribute of
+    python objects.  This is useful if the environment method
+    :meth:`~SandboxedEnvironment.is_safe_attribute` is overridden.
+
+    >>> from jinja2.sandbox import is_internal_attribute
+    >>> is_internal_attribute(str, "mro")
+    True
+    >>> is_internal_attribute(str, "upper")
+    False
+    """
+    if isinstance(obj, types.FunctionType):
+        if attr in UNSAFE_FUNCTION_ATTRIBUTES:
+            return True
+    elif isinstance(obj, types.MethodType):
+        if attr in UNSAFE_FUNCTION_ATTRIBUTES or attr in UNSAFE_METHOD_ATTRIBUTES:
+            return True
+    elif isinstance(obj, type):
+        if attr == "mro":
+            return True
+    elif isinstance(obj, (types.CodeType, types.TracebackType, types.FrameType)):
+        return True
+    elif isinstance(obj, types.GeneratorType):
+        if attr in UNSAFE_GENERATOR_ATTRIBUTES:
+            return True
+    elif hasattr(types, "CoroutineType") and isinstance(obj, types.CoroutineType):
+        if attr in UNSAFE_COROUTINE_ATTRIBUTES:
+            return True
+    elif hasattr(types, "AsyncGeneratorType") and isinstance(
+        obj, types.AsyncGeneratorType
+    ):
+        if attr in UNSAFE_ASYNC_GENERATOR_ATTRIBUTES:
+            return True
+    return attr.startswith("__")
+
+
+def modifies_known_mutable(obj: t.Any, attr: str) -> bool:
+    """This function checks if an attribute on a builtin mutable object
+    (list, dict, set or deque) or the corresponding ABCs would modify it
+    if called.
+
+    >>> modifies_known_mutable({}, "clear")
+    True
+    >>> modifies_known_mutable({}, "keys")
+    False
+    >>> modifies_known_mutable([], "append")
+    True
+    >>> modifies_known_mutable([], "index")
+    False
+
+    If called with an unsupported object, ``False`` is returned.
+
+    >>> modifies_known_mutable("foo", "upper")
+    False
+    """
+    for typespec, unsafe in _mutable_spec:
+        if isinstance(obj, typespec):
+            return attr in unsafe
+    return False
+
+
+class SandboxedEnvironment(Environment):
+    """The sandboxed environment.  It works like the regular environment but
+    tells the compiler to generate sandboxed code.  Additionally subclasses of
+    this environment may override the methods that tell the runtime what
+    attributes or functions are safe to access.
+
+    If the template tries to access insecure code a :exc:`SecurityError` is
+    raised.  However, other exceptions may also occur during rendering, so
+    the caller has to ensure that all exceptions are caught.
+    """
+
+    sandboxed = True
+
+    #: default callback table for the binary operators.  A copy of this is
+    #: available on each instance of a sandboxed environment as
+    #: :attr:`binop_table`
+    default_binop_table: t.Dict[str, t.Callable[[t.Any, t.Any], t.Any]] = {
+        "+": operator.add,
+        "-": operator.sub,
+        "*": operator.mul,
+        "/": operator.truediv,
+        "//": operator.floordiv,
+        "**": operator.pow,
+        "%": operator.mod,
+    }
+
+    #: default callback table for the unary operators.  A copy of this is
+    #: available on each instance of a sandboxed environment as
+    #: :attr:`unop_table`
+    default_unop_table: t.Dict[str, t.Callable[[t.Any], t.Any]] = {
+        "+": operator.pos,
+        "-": operator.neg,
+    }
+
+    #: a set of binary operators that should be intercepted.  Each operator
+    #: that is added to this set (empty by default) is delegated to the
+    #: :meth:`call_binop` method that will perform the operator.  The default
+    #: operator callback is specified by :attr:`binop_table`.
+    #:
+    #: The following binary operators are interceptable:
+    #: ``//``, ``%``, ``+``, ``*``, ``-``, ``/``, and ``**``
+    #:
+    #: The default operation from the operator table corresponds to the
+    #: builtin function.  Intercepted calls are always slower than the native
+    #: operator call, so make sure only to intercept the ones you are
+    #: interested in.
+    #:
+    #: .. versionadded:: 2.6
+    intercepted_binops: t.FrozenSet[str] = frozenset()
+
+    #: a set of unary operators that should be intercepted.  Each operator
+    #: that is added to this set (empty by default) is delegated to the
+    #: :meth:`call_unop` method that will perform the operator.  The default
+    #: operator callback is specified by :attr:`unop_table`.
+    #:
+    #: The following unary operators are interceptable: ``+``, ``-``
+    #:
+    #: The default operation from the operator table corresponds to the
+    #: builtin function.  Intercepted calls are always slower than the native
+    #: operator call, so make sure only to intercept the ones you are
+    #: interested in.
+    #:
+    #: .. versionadded:: 2.6
+    intercepted_unops: t.FrozenSet[str] = frozenset()
+
+    def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.globals["range"] = safe_range
+        self.binop_table = self.default_binop_table.copy()
+        self.unop_table = self.default_unop_table.copy()
+
+    def is_safe_attribute(self, obj: t.Any, attr: str, value: t.Any) -> bool:
+        """The sandboxed environment will call this method to check if the
+        attribute of an object is safe to access.  By default all attributes
+        starting with an underscore are considered private, as are the
+        special attributes of internal Python objects as returned by the
+        :func:`is_internal_attribute` function.
+        """
+        return not (attr.startswith("_") or is_internal_attribute(obj, attr))
+
+    def is_safe_callable(self, obj: t.Any) -> bool:
+        """Check if an object is safely callable. By default callables
+        are considered safe unless decorated with :func:`unsafe`.
+
+        This also recognizes the Django convention of setting
+        ``func.alters_data = True``.
+        """
+        return not (
+            getattr(obj, "unsafe_callable", False) or getattr(obj, "alters_data", False)
+        )
+
+    def call_binop(
+        self, context: Context, operator: str, left: t.Any, right: t.Any
+    ) -> t.Any:
+        """For intercepted binary operator calls (:meth:`intercepted_binops`)
+        this function is executed instead of the builtin operator.  This can
+        be used to fine tune the behavior of certain operators.
+
+        .. versionadded:: 2.6
+        """
+        return self.binop_table[operator](left, right)
+
+    def call_unop(self, context: Context, operator: str, arg: t.Any) -> t.Any:
+        """For intercepted unary operator calls (:meth:`intercepted_unops`)
+        this function is executed instead of the builtin operator.  This can
+        be used to fine tune the behavior of certain operators.
+
+        .. versionadded:: 2.6
+        """
+        return self.unop_table[operator](arg)
+
+    def getitem(
+        self, obj: t.Any, argument: t.Union[str, t.Any]
+    ) -> t.Union[t.Any, Undefined]:
+        """Subscribe an object from sandboxed code."""
+        try:
+            return obj[argument]
+        except (TypeError, LookupError):
+            if isinstance(argument, str):
+                try:
+                    attr = str(argument)
+                except Exception:
+                    pass
+                else:
+                    try:
+                        value = getattr(obj, attr)
+                    except AttributeError:
+                        pass
+                    else:
+                        if self.is_safe_attribute(obj, argument, value):
+                            return value
+                        return self.unsafe_undefined(obj, argument)
+        return self.undefined(obj=obj, name=argument)
+
+    def getattr(self, obj: t.Any, attribute: str) -> t.Union[t.Any, Undefined]:
+        """Subscribe an object from sandboxed code and prefer the
+        attribute.  The attribute passed *must* be a string.
+        """
+        try:
+            value = getattr(obj, attribute)
+        except AttributeError:
+            try:
+                return obj[attribute]
+            except (TypeError, LookupError):
+                pass
+        else:
+            if self.is_safe_attribute(obj, attribute, value):
+                return value
+            return self.unsafe_undefined(obj, attribute)
+        return self.undefined(obj=obj, name=attribute)
+
+    def unsafe_undefined(self, obj: t.Any, attribute: str) -> Undefined:
+        """Return an undefined object for unsafe attributes."""
+        return self.undefined(
+            f"access to attribute {attribute!r} of"
+            f" {type(obj).__name__!r} object is unsafe.",
+            name=attribute,
+            obj=obj,
+            exc=SecurityError,
+        )
+
+    def format_string(
+        self,
+        s: str,
+        args: t.Tuple[t.Any, ...],
+        kwargs: t.Dict[str, t.Any],
+        format_func: t.Optional[t.Callable] = None,
+    ) -> str:
+        """If a format call is detected, then this is routed through this
+        method so that our safety sandbox can be used for it.
+        """
+        formatter: SandboxedFormatter
+        if isinstance(s, Markup):
+            formatter = SandboxedEscapeFormatter(self, escape=s.escape)
+        else:
+            formatter = SandboxedFormatter(self)
+
+        if format_func is not None and format_func.__name__ == "format_map":
+            if len(args) != 1 or kwargs:
+                raise TypeError(
+                    "format_map() takes exactly one argument"
+                    f" ({len(args) + len(kwargs)} given)"
+                )
+
+            kwargs = args[0]
+            args = ()
+
+        rv = formatter.vformat(s, args, kwargs)
+        return type(s)(rv)
+
+    def call(
+        __self,  # noqa: B902
+        __context: Context,
+        __obj: t.Any,
+        *args: t.Any,
+        **kwargs: t.Any,
+    ) -> t.Any:
+        """Call an object from sandboxed code."""
+        fmt = inspect_format_method(__obj)
+        if fmt is not None:
+            return __self.format_string(fmt, args, kwargs, __obj)
+
+        # the double prefixes are to avoid double keyword argument
+        # errors when proxying the call.
+        if not __self.is_safe_callable(__obj):
+            raise SecurityError(f"{__obj!r} is not safely callable")
+        return __context.call(__obj, *args, **kwargs)
+
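+# Editor's sketch (hypothetical subclass, not upstream code):
+# ``intercepted_binops`` plus ``call_binop`` let a subclass veto expensive
+# operations; the exponent limit below is an illustrative choice:
+#
+#     class PowerLimitedEnvironment(SandboxedEnvironment):
+#         intercepted_binops = frozenset(["**"])
+#
+#         def call_binop(self, context, operator, left, right):
+#             if operator == "**" and right > 100:
+#                 raise SecurityError("exponent too large for the sandbox")
+#             return super().call_binop(context, operator, left, right)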
+
+class ImmutableSandboxedEnvironment(SandboxedEnvironment):
+    """Works exactly like the regular `SandboxedEnvironment` but does not
+    permit modifications on the builtin mutable objects `list`, `set`, and
+    `dict` by using the :func:`modifies_known_mutable` function.
+    """
+
+    def is_safe_attribute(self, obj: t.Any, attr: str, value: t.Any) -> bool:
+        if not super().is_safe_attribute(obj, attr, value):
+            return False
+
+        return not modifies_known_mutable(obj, attr)
+
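+# Editor's note (behavior sketch, hypothetical template): reads on builtin
+# containers stay allowed, mutating calls are rejected:
+#
+#     env = ImmutableSandboxedEnvironment()
+#     tmpl = env.from_string("{{ items.append(4) }}")
+#     tmpl.render(items=[1, 2, 3])  # raises SecurityError: append is unsafe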
+
+class SandboxedFormatter(Formatter):
+    def __init__(self, env: Environment, **kwargs: t.Any) -> None:
+        self._env = env
+        super().__init__(**kwargs)
+
+    def get_field(
+        self, field_name: str, args: t.Sequence[t.Any], kwargs: t.Mapping[str, t.Any]
+    ) -> t.Tuple[t.Any, str]:
+        first, rest = formatter_field_name_split(field_name)
+        obj = self.get_value(first, args, kwargs)
+        for is_attr, i in rest:
+            if is_attr:
+                obj = self._env.getattr(obj, i)
+            else:
+                obj = self._env.getitem(obj, i)
+        return obj, first
+
+
+class SandboxedEscapeFormatter(SandboxedFormatter, EscapeFormatter):
+    pass
diff --git a/venv/lib/python3.9/site-packages/jinja2/tests.py b/venv/lib/python3.9/site-packages/jinja2/tests.py
new file mode 100644
index 0000000..a467cf0
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/tests.py
@@ -0,0 +1,255 @@
+"""Built-in template tests used with the ``is`` operator."""
+import operator
+import typing as t
+from collections import abc
+from numbers import Number
+
+from .runtime import Undefined
+from .utils import pass_environment
+
+if t.TYPE_CHECKING:
+    from .environment import Environment
+
+
+def test_odd(value: int) -> bool:
+    """Return true if the variable is odd."""
+    return value % 2 == 1
+
+
+def test_even(value: int) -> bool:
+    """Return true if the variable is even."""
+    return value % 2 == 0
+
+
+def test_divisibleby(value: int, num: int) -> bool:
+    """Check if a variable is divisible by a number."""
+    return value % num == 0
+
+
+def test_defined(value: t.Any) -> bool:
+    """Return true if the variable is defined:
+
+    .. sourcecode:: jinja
+
+        {% if variable is defined %}
+            value of variable: {{ variable }}
+        {% else %}
+            variable is not defined
+        {% endif %}
+
+    See the :func:`default` filter for a simple way to set undefined
+    variables.
+    """
+    return not isinstance(value, Undefined)
+
+
+def test_undefined(value: t.Any) -> bool:
+    """Like :func:`defined` but the other way round."""
+    return isinstance(value, Undefined)
+
+
+@pass_environment
+def test_filter(env: "Environment", value: str) -> bool:
+    """Check if a filter exists by name. Useful if a filter may be
+    optionally available.
+
+    .. code-block:: jinja
+
+        {% if 'markdown' is filter %}
+            {{ value | markdown }}
+        {% else %}
+            {{ value }}
+        {% endif %}
+
+    .. versionadded:: 3.0
+    """
+    return value in env.filters
+
+
+@pass_environment
+def test_test(env: "Environment", value: str) -> bool:
+    """Check if a test exists by name. Useful if a test may be
+    optionally available.
+
+    .. code-block:: jinja
+
+        {% if 'loud' is test %}
+            {% if value is loud %}
+                {{ value|upper }}
+            {% else %}
+                {{ value|lower }}
+            {% endif %}
+        {% else %}
+            {{ value }}
+        {% endif %}
+
+    .. versionadded:: 3.0
+    """
+    return value in env.tests
+
+
+def test_none(value: t.Any) -> bool:
+    """Return true if the variable is none."""
+    return value is None
+
+
+def test_boolean(value: t.Any) -> bool:
+    """Return true if the object is a boolean value.
+
+    .. versionadded:: 2.11
+    """
+    return value is True or value is False
+
+
+def test_false(value: t.Any) -> bool:
+    """Return true if the object is False.
+
+    .. versionadded:: 2.11
+    """
+    return value is False
+
+
+def test_true(value: t.Any) -> bool:
+    """Return true if the object is True.
+
+    .. versionadded:: 2.11
+    """
+    return value is True
+
+
+# NOTE: The existing 'number' test matches booleans and floats
+def test_integer(value: t.Any) -> bool:
+    """Return true if the object is an integer.
+
+    .. versionadded:: 2.11
+    """
+    return isinstance(value, int) and value is not True and value is not False
+
+
+# NOTE: The existing 'number' test matches booleans and integers
+def test_float(value: t.Any) -> bool:
+    """Return true if the object is a float.
+
+    .. versionadded:: 2.11
+    """
+    return isinstance(value, float)
+
+
+def test_lower(value: str) -> bool:
+    """Return true if the variable is lowercased."""
+    return str(value).islower()
+
+
+def test_upper(value: str) -> bool:
+    """Return true if the variable is uppercased."""
+    return str(value).isupper()
+
+
+def test_string(value: t.Any) -> bool:
+    """Return true if the object is a string."""
+    return isinstance(value, str)
+
+
+def test_mapping(value: t.Any) -> bool:
+    """Return true if the object is a mapping (dict etc.).
+
+    .. versionadded:: 2.6
+    """
+    return isinstance(value, abc.Mapping)
+
+
+def test_number(value: t.Any) -> bool:
+    """Return true if the variable is a number."""
+    return isinstance(value, Number)
+
+
+def test_sequence(value: t.Any) -> bool:
+    """Return true if the variable is a sequence. Sequences are variables
+    that are iterable.
+    """
+    try:
+        len(value)
+        value.__getitem__
+    except Exception:
+        return False
+
+    return True
+
+
+def test_sameas(value: t.Any, other: t.Any) -> bool:
+    """Check if an object points to the same memory address than another
+    object:
+
+    .. sourcecode:: jinja
+
+        {% if foo.attribute is sameas false %}
+            the foo attribute really is the `False` singleton
+        {% endif %}
+    """
+    return value is other
+
+
+def test_iterable(value: t.Any) -> bool:
+    """Check if it's possible to iterate over an object."""
+    try:
+        iter(value)
+    except TypeError:
+        return False
+
+    return True
+
+
+def test_escaped(value: t.Any) -> bool:
+    """Check if the value is escaped."""
+    return hasattr(value, "__html__")
+
+
+def test_in(value: t.Any, seq: t.Container) -> bool:
+    """Check if value is in seq.
+
+    .. versionadded:: 2.10
+    """
+    return value in seq
+
+
+TESTS = {
+    "odd": test_odd,
+    "even": test_even,
+    "divisibleby": test_divisibleby,
+    "defined": test_defined,
+    "undefined": test_undefined,
+    "filter": test_filter,
+    "test": test_test,
+    "none": test_none,
+    "boolean": test_boolean,
+    "false": test_false,
+    "true": test_true,
+    "integer": test_integer,
+    "float": test_float,
+    "lower": test_lower,
+    "upper": test_upper,
+    "string": test_string,
+    "mapping": test_mapping,
+    "number": test_number,
+    "sequence": test_sequence,
+    "iterable": test_iterable,
+    "callable": callable,
+    "sameas": test_sameas,
+    "escaped": test_escaped,
+    "in": test_in,
+    "==": operator.eq,
+    "eq": operator.eq,
+    "equalto": operator.eq,
+    "!=": operator.ne,
+    "ne": operator.ne,
+    ">": operator.gt,
+    "gt": operator.gt,
+    "greaterthan": operator.gt,
+    "ge": operator.ge,
+    ">=": operator.ge,
+    "<": operator.lt,
+    "lt": operator.lt,
+    "lessthan": operator.lt,
+    "<=": operator.le,
+    "le": operator.le,
+}
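+
+
+# Editor's note: the mapping above seeds ``Environment.tests``; custom tests
+# can be registered the same way (sketch, not upstream code):
+#
+#     env = Environment()
+#     env.tests["multiple_of"] = lambda value, num: value % num == 0
+#     env.from_string("{{ 12 is multiple_of 4 }}").render()  # -> 'True'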
diff --git a/venv/lib/python3.9/site-packages/jinja2/utils.py b/venv/lib/python3.9/site-packages/jinja2/utils.py
new file mode 100644
index 0000000..9b5f5a5
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/utils.py
@@ -0,0 +1,755 @@
+import enum
+import json
+import os
+import re
+import typing as t
+from collections import abc
+from collections import deque
+from random import choice
+from random import randrange
+from threading import Lock
+from types import CodeType
+from urllib.parse import quote_from_bytes
+
+import markupsafe
+
+if t.TYPE_CHECKING:
+    import typing_extensions as te
+
+F = t.TypeVar("F", bound=t.Callable[..., t.Any])
+
+# special singleton representing missing values for the runtime
+missing: t.Any = type("MissingType", (), {"__repr__": lambda x: "missing"})()
+
+internal_code: t.MutableSet[CodeType] = set()
+
+concat = "".join
+
+
+def pass_context(f: F) -> F:
+    """Pass the :class:`~jinja2.runtime.Context` as the first argument
+    to the decorated function when called while rendering a template.
+
+    Can be used on functions, filters, and tests.
+
+    If only ``Context.eval_context`` is needed, use
+    :func:`pass_eval_context`. If only ``Context.environment`` is
+    needed, use :func:`pass_environment`.
+
+    .. versionadded:: 3.0.0
+        Replaces ``contextfunction`` and ``contextfilter``.
+    """
+    f.jinja_pass_arg = _PassArg.context  # type: ignore
+    return f
+
+
+def pass_eval_context(f: F) -> F:
+    """Pass the :class:`~jinja2.nodes.EvalContext` as the first argument
+    to the decorated function when called while rendering a template.
+    See :ref:`eval-context`.
+
+    Can be used on functions, filters, and tests.
+
+    If only ``EvalContext.environment`` is needed, use
+    :func:`pass_environment`.
+
+    .. versionadded:: 3.0.0
+        Replaces ``evalcontextfunction`` and ``evalcontextfilter``.
+    """
+    f.jinja_pass_arg = _PassArg.eval_context  # type: ignore
+    return f
+
+
+def pass_environment(f: F) -> F:
+    """Pass the :class:`~jinja2.Environment` as the first argument to
+    the decorated function when called while rendering a template.
+
+    Can be used on functions, filters, and tests.
+
+    .. versionadded:: 3.0.0
+        Replaces ``environmentfunction`` and ``environmentfilter``.
+    """
+    f.jinja_pass_arg = _PassArg.environment  # type: ignore
+    return f
+
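+# Editor's sketch (hypothetical filter, not upstream code): a decorated
+# filter receives the active environment as its first argument:
+#
+#     @pass_environment
+#     def lookup_global(env, name):
+#         return env.globals.get(name, "")
+#
+#     env.filters["lookup_global"] = lookup_global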
+
+class _PassArg(enum.Enum):
+    context = enum.auto()
+    eval_context = enum.auto()
+    environment = enum.auto()
+
+    @classmethod
+    def from_obj(cls, obj: F) -> t.Optional["_PassArg"]:
+        if hasattr(obj, "jinja_pass_arg"):
+            return obj.jinja_pass_arg  # type: ignore
+
+        return None
+
+
+def internalcode(f: F) -> F:
+    """Marks the function as internally used"""
+    internal_code.add(f.__code__)
+    return f
+
+
+def is_undefined(obj: t.Any) -> bool:
+    """Check if the object passed is undefined.  This does nothing more than
+    performing an instance check against :class:`Undefined` but looks nicer.
+    This can be used for custom filters or tests that want to react to
+    undefined variables.  For example a custom default filter can look like
+    this::
+
+        def default(var, default=''):
+            if is_undefined(var):
+                return default
+            return var
+    """
+    from .runtime import Undefined
+
+    return isinstance(obj, Undefined)
+
+
+def consume(iterable: t.Iterable[t.Any]) -> None:
+    """Consumes an iterable without doing anything with it."""
+    for _ in iterable:
+        pass
+
+
+def clear_caches() -> None:
+    """Jinja keeps internal caches for environments and lexers.  These are
+    used so that Jinja doesn't have to recreate environments and lexers all
+    the time.  Normally you don't have to care about that but if you are
+    measuring memory consumption you may want to clean the caches.
+    """
+    from .environment import get_spontaneous_environment
+    from .lexer import _lexer_cache
+
+    get_spontaneous_environment.cache_clear()
+    _lexer_cache.clear()
+
+
+def import_string(import_name: str, silent: bool = False) -> t.Any:
+    """Imports an object based on a string.  This is useful if you want to
+    use import paths as endpoints or something similar.  An import path can
+    be specified either in dotted notation (``xml.sax.saxutils.escape``)
+    or with a colon as object delimiter (``xml.sax.saxutils:escape``).
+
+    If `silent` is True the return value will be `None` if the import
+    fails.
+
+    :return: imported object
+    """
+    try:
+        if ":" in import_name:
+            module, obj = import_name.split(":", 1)
+        elif "." in import_name:
+            module, _, obj = import_name.rpartition(".")
+        else:
+            return __import__(import_name)
+        return getattr(__import__(module, None, None, [obj]), obj)
+    except (ImportError, AttributeError):
+        if not silent:
+            raise
+
+
+def open_if_exists(filename: str, mode: str = "rb") -> t.Optional[t.IO]:
+    """Returns a file descriptor for the filename if that file exists,
+    otherwise ``None``.
+    """
+    if not os.path.isfile(filename):
+        return None
+
+    return open(filename, mode)
+
+
+def object_type_repr(obj: t.Any) -> str:
+    """Returns the name of the object's type.  For some recognized
+    singletons the name of the object is returned instead. (For
+    example for `None` and `Ellipsis`).
+    """
+    if obj is None:
+        return "None"
+    elif obj is Ellipsis:
+        return "Ellipsis"
+
+    cls = type(obj)
+
+    if cls.__module__ == "builtins":
+        return f"{cls.__name__} object"
+
+    return f"{cls.__module__}.{cls.__name__} object"
+
+
+def pformat(obj: t.Any) -> str:
+    """Format an object using :func:`pprint.pformat`."""
+    from pprint import pformat  # type: ignore
+
+    return pformat(obj)
+
+
+_http_re = re.compile(
+    r"""
+    ^
+    (
+        (https?://|www\.)  # scheme or www
+        (([\w%-]+\.)+)?  # subdomain
+        (
+            [a-z]{2,63}  # basic tld
+        |
+            xn--[\w%]{2,59}  # idna tld
+        )
+    |
+        ([\w%-]{2,63}\.)+  # basic domain
+        (com|net|int|edu|gov|org|info|mil)  # basic tld
+    |
+        (https?://)  # scheme
+        (
+            (([\d]{1,3})(\.[\d]{1,3}){3})  # IPv4
+        |
+            (\[([\da-f]{0,4}:){2}([\da-f]{0,4}:?){1,6}])  # IPv6
+        )
+    )
+    (?::[\d]{1,5})?  # port
+    (?:[/?#]\S*)?  # path, query, and fragment
+    $
+    """,
+    re.IGNORECASE | re.VERBOSE,
+)
+_email_re = re.compile(r"^\S+@\w[\w.-]*\.\w+$")
+
+
+def urlize(
+    text: str,
+    trim_url_limit: t.Optional[int] = None,
+    rel: t.Optional[str] = None,
+    target: t.Optional[str] = None,
+    extra_schemes: t.Optional[t.Iterable[str]] = None,
+) -> str:
+    """Convert URLs in text into clickable links.
+
+    This may not recognize links in some situations. Usually, a more
+    comprehensive formatter, such as a Markdown library, is a better
+    choice.
+
+    Works on ``http://``, ``https://``, ``www.``, ``mailto:``, and email
+    addresses. Links with trailing punctuation (periods, commas, closing
+    parentheses) and leading punctuation (opening parentheses) are
+    recognized excluding the punctuation. Email addresses that include
+    header fields are not recognized (for example,
+    ``mailto:address@example.com?cc=copy@example.com``).
+
+    :param text: Original text containing URLs to link.
+    :param trim_url_limit: Shorten displayed URL values to this length.
+    :param target: Add the ``target`` attribute to links.
+    :param rel: Add the ``rel`` attribute to links.
+    :param extra_schemes: Recognize URLs that start with these schemes
+        in addition to the default behavior.
+
+    .. versionchanged:: 3.0
+        The ``extra_schemes`` parameter was added.
+
+    .. versionchanged:: 3.0
+        Generate ``https://`` links for URLs without a scheme.
+
+    .. versionchanged:: 3.0
+        The parsing rules were updated. Recognize email addresses with
+        or without the ``mailto:`` scheme. Validate IP addresses. Ignore
+        parentheses and brackets in more cases.
+    """
+    if trim_url_limit is not None:
+
+        def trim_url(x: str) -> str:
+            if len(x) > trim_url_limit:  # type: ignore
+                return f"{x[:trim_url_limit]}..."
+
+            return x
+
+    else:
+
+        def trim_url(x: str) -> str:
+            return x
+
+    words = re.split(r"(\s+)", str(markupsafe.escape(text)))
+    rel_attr = f' rel="{markupsafe.escape(rel)}"' if rel else ""
+    target_attr = f' target="{markupsafe.escape(target)}"' if target else ""
+
+    for i, word in enumerate(words):
+        head, middle, tail = "", word, ""
+        match = re.match(r"^([(<]|&lt;)+", middle)
+
+        if match:
+            head = match.group()
+            middle = middle[match.end() :]
+
+        # Unlike head, which is anchored to the start of the string,
+        # need to check that the string ends with any of the characters
+        # before trying to match all of them, to avoid backtracking.
+        if middle.endswith((")", ">", ".", ",", "\n", "&gt;")):
+            match = re.search(r"([)>.,\n]|&gt;)+$", middle)
+
+            if match:
+                tail = match.group()
+                middle = middle[: match.start()]
+
+        # Prefer balancing parentheses in URLs instead of ignoring a
+        # trailing character.
+        for start_char, end_char in ("(", ")"), ("<", ">"), ("&lt;", "&gt;"):
+            start_count = middle.count(start_char)
+
+            if start_count <= middle.count(end_char):
+                # Balanced, or lighter on the left
+                continue
+
+            # Move as many as possible from the tail to balance
+            for _ in range(min(start_count, tail.count(end_char))):
+                end_index = tail.index(end_char) + len(end_char)
+                # Move anything in the tail before the end char too
+                middle += tail[:end_index]
+                tail = tail[end_index:]
+
+        if _http_re.match(middle):
+            if middle.startswith("https://") or middle.startswith("http://"):
+                middle = (
+                    f'<a href="{middle}"{rel_attr}{target_attr}>{trim_url(middle)}</a>'
+                )
+            else:
+                middle = (
+                    f'<a href="https://{middle}"{rel_attr}{target_attr}>'
+                    f"{trim_url(middle)}</a>"
+                )
+
+        elif middle.startswith("mailto:") and _email_re.match(middle[7:]):
+            middle = f'<a href="{middle}">{middle[7:]}</a>'
+
+        elif (
+            "@" in middle
+            and not middle.startswith("www.")
+            and ":" not in middle
+            and _email_re.match(middle)
+        ):
+            middle = f'<a href="mailto:{middle}">{middle}</a>'
+
+        elif extra_schemes is not None:
+            for scheme in extra_schemes:
+                if middle != scheme and middle.startswith(scheme):
+                    middle = f'<a href="{middle}"{rel_attr}{target_attr}>{middle}</a>'
+
+        words[i] = f"{head}{middle}{tail}"
+
+    return "".join(words)
+
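+# Editor's note (behavior sketch): bare domains get an ``https://`` link, per
+# the 3.0 changelog above; expected to render roughly as:
+#
+#     >>> urlize("see www.example.com for details")
+#     'see <a href="https://www.example.com">www.example.com</a> for details'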
+
+def generate_lorem_ipsum(
+    n: int = 5, html: bool = True, min: int = 20, max: int = 100
+) -> str:
+    """Generate some lorem ipsum for the template."""
+    from .constants import LOREM_IPSUM_WORDS
+
+    words = LOREM_IPSUM_WORDS.split()
+    result = []
+
+    for _ in range(n):
+        next_capitalized = True
+        last_comma = last_fullstop = 0
+        word = None
+        last = None
+        p = []
+
+        # each paragraph consists of 20 to 100 words.
+        for idx, _ in enumerate(range(randrange(min, max))):
+            while True:
+                word = choice(words)
+                if word != last:
+                    last = word
+                    break
+            if next_capitalized:
+                word = word.capitalize()
+                next_capitalized = False
+            # add commas
+            if idx - randrange(3, 8) > last_comma:
+                last_comma = idx
+                last_fullstop += 2
+                word += ","
+            # add end of sentences
+            if idx - randrange(10, 20) > last_fullstop:
+                last_comma = last_fullstop = idx
+                word += "."
+                next_capitalized = True
+            p.append(word)
+
+        # ensure that the paragraph ends with a dot.
+        p_str = " ".join(p)
+
+        if p_str.endswith(","):
+            p_str = p_str[:-1] + "."
+        elif not p_str.endswith("."):
+            p_str += "."
+
+        result.append(p_str)
+
+    if not html:
+        return "\n\n".join(result)
+    return markupsafe.Markup(
+        "\n".join(f"<p>{markupsafe.escape(x)}</p>" for x in result)
+    )
+
+
+def url_quote(obj: t.Any, charset: str = "utf-8", for_qs: bool = False) -> str:
+    """Quote a string for use in a URL using the given charset.
+
+    :param obj: String or bytes to quote. Other types are converted to
+        string then encoded to bytes using the given charset.
+    :param charset: Encode text to bytes using this charset.
+    :param for_qs: Quote "/" and use "+" for spaces.
+    """
+    if not isinstance(obj, bytes):
+        if not isinstance(obj, str):
+            obj = str(obj)
+
+        obj = obj.encode(charset)
+
+    safe = b"" if for_qs else b"/"
+    rv = quote_from_bytes(obj, safe)
+
+    if for_qs:
+        rv = rv.replace("%20", "+")
+
+    return rv
+
+
+@abc.MutableMapping.register
+class LRUCache:
+    """A simple LRU Cache implementation."""
+
+    # this is fast for small capacities (something below 1000) but doesn't
+    # scale.  But as long as it's only used as storage for templates this
+    # won't do any harm.
+
+    def __init__(self, capacity: int) -> None:
+        self.capacity = capacity
+        self._mapping: t.Dict[t.Any, t.Any] = {}
+        self._queue: "te.Deque[t.Any]" = deque()
+        self._postinit()
+
+    def _postinit(self) -> None:
+        # alias all queue methods for faster lookup
+        self._popleft = self._queue.popleft
+        self._pop = self._queue.pop
+        self._remove = self._queue.remove
+        self._wlock = Lock()
+        self._append = self._queue.append
+
+    def __getstate__(self) -> t.Mapping[str, t.Any]:
+        return {
+            "capacity": self.capacity,
+            "_mapping": self._mapping,
+            "_queue": self._queue,
+        }
+
+    def __setstate__(self, d: t.Mapping[str, t.Any]) -> None:
+        self.__dict__.update(d)
+        self._postinit()
+
+    def __getnewargs__(self) -> t.Tuple:
+        return (self.capacity,)
+
+    def copy(self) -> "LRUCache":
+        """Return a shallow copy of the instance."""
+        rv = self.__class__(self.capacity)
+        rv._mapping.update(self._mapping)
+        rv._queue.extend(self._queue)
+        return rv
+
+    def get(self, key: t.Any, default: t.Any = None) -> t.Any:
+        """Return an item from the cache dict or `default`"""
+        try:
+            return self[key]
+        except KeyError:
+            return default
+
+    def setdefault(self, key: t.Any, default: t.Any = None) -> t.Any:
+        """Set `default` if the key is not in the cache otherwise
+        leave unchanged. Return the value of this key.
+        """
+        try:
+            return self[key]
+        except KeyError:
+            self[key] = default
+            return default
+
+    def clear(self) -> None:
+        """Clear the cache."""
+        with self._wlock:
+            self._mapping.clear()
+            self._queue.clear()
+
+    def __contains__(self, key: t.Any) -> bool:
+        """Check if a key exists in this cache."""
+        return key in self._mapping
+
+    def __len__(self) -> int:
+        """Return the current size of the cache."""
+        return len(self._mapping)
+
+    def __repr__(self) -> str:
+        return f"<{type(self).__name__} {self._mapping!r}>"
+
+    def __getitem__(self, key: t.Any) -> t.Any:
+        """Get an item from the cache. Moves the item up so that it has the
+        highest priority then.
+
+        Raise a `KeyError` if it does not exist.
+        """
+        with self._wlock:
+            rv = self._mapping[key]
+
+            if self._queue[-1] != key:
+                try:
+                    self._remove(key)
+                except ValueError:
+                    # if something removed the key from the container
+                    # when we read, ignore the ValueError that we would
+                    # get otherwise.
+                    pass
+
+                self._append(key)
+
+            return rv
+
+    def __setitem__(self, key: t.Any, value: t.Any) -> None:
+        """Sets the value for an item. Moves the item up so that it
+        has the highest priority then.
+        """
+        with self._wlock:
+            if key in self._mapping:
+                self._remove(key)
+            elif len(self._mapping) == self.capacity:
+                del self._mapping[self._popleft()]
+
+            self._append(key)
+            self._mapping[key] = value
+
+    def __delitem__(self, key: t.Any) -> None:
+        """Remove an item from the cache dict.
+        Raise a `KeyError` if it does not exist.
+        """
+        with self._wlock:
+            del self._mapping[key]
+
+            try:
+                self._remove(key)
+            except ValueError:
+                pass
+
+    def items(self) -> t.Iterable[t.Tuple[t.Any, t.Any]]:
+        """Return a list of items."""
+        result = [(key, self._mapping[key]) for key in list(self._queue)]
+        result.reverse()
+        return result
+
+    def values(self) -> t.Iterable[t.Any]:
+        """Return a list of all values."""
+        return [x[1] for x in self.items()]
+
+    def keys(self) -> t.Iterable[t.Any]:
+        """Return a list of all keys ordered by most recent usage."""
+        return list(self)
+
+    def __iter__(self) -> t.Iterator[t.Any]:
+        return reversed(tuple(self._queue))
+
+    def __reversed__(self) -> t.Iterator[t.Any]:
+        """Iterate over the keys in the cache dict, oldest items
+        coming first.
+        """
+        return iter(tuple(self._queue))
+
+    __copy__ = copy
+
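+# Editor's note (usage sketch): the least recently *used* key is evicted,
+# and ``__getitem__`` counts as a use:
+#
+#     cache = LRUCache(2)
+#     cache["a"], cache["b"] = 1, 2
+#     cache["a"]          # "a" is now the most recent entry
+#     cache["c"] = 3      # evicts "b", the oldest entry
+#     "b" in cache        # -> False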
+
+def select_autoescape(
+    enabled_extensions: t.Collection[str] = ("html", "htm", "xml"),
+    disabled_extensions: t.Collection[str] = (),
+    default_for_string: bool = True,
+    default: bool = False,
+) -> t.Callable[[t.Optional[str]], bool]:
+    """Intelligently sets the initial value of autoescaping based on the
+    filename of the template.  This is the recommended way to configure
+    autoescaping if you do not want to write a custom function yourself.
+
+    If you want to enable it for all templates created from strings or
+    for all templates with `.html` and `.xml` extensions::
+
+        from jinja2 import Environment, select_autoescape
+        env = Environment(autoescape=select_autoescape(
+            enabled_extensions=('html', 'xml'),
+            default_for_string=True,
+        ))
+
+    Example configuration to turn it on at all times except if the template
+    ends with `.txt`::
+
+        from jinja2 import Environment, select_autoescape
+        env = Environment(autoescape=select_autoescape(
+            disabled_extensions=('txt',),
+            default_for_string=True,
+            default=True,
+        ))
+
+    The `enabled_extensions` is an iterable of all the extensions that
+    autoescaping should be enabled for.  Likewise `disabled_extensions` is
+    a list of all templates it should be disabled for.  If a template is
+    loaded from a string then the default from `default_for_string` is used.
+    If nothing matches then the initial value of autoescaping is set to the
+    value of `default`.
+
+    For security reasons this function operates case-insensitively.
+
+    .. versionadded:: 2.9
+    """
+    enabled_patterns = tuple(f".{x.lstrip('.').lower()}" for x in enabled_extensions)
+    disabled_patterns = tuple(f".{x.lstrip('.').lower()}" for x in disabled_extensions)
+
+    def autoescape(template_name: t.Optional[str]) -> bool:
+        if template_name is None:
+            return default_for_string
+        template_name = template_name.lower()
+        if template_name.endswith(enabled_patterns):
+            return True
+        if template_name.endswith(disabled_patterns):
+            return False
+        return default
+
+    return autoescape
+
+
+def htmlsafe_json_dumps(
+    obj: t.Any, dumps: t.Optional[t.Callable[..., str]] = None, **kwargs: t.Any
+) -> markupsafe.Markup:
+    """Serialize an object to a string of JSON with :func:`json.dumps`,
+    then replace HTML-unsafe characters with Unicode escapes and mark
+    the result safe with :class:`~markupsafe.Markup`.
+
+    This is available in templates as the ``|tojson`` filter.
+
+    The following characters are escaped: ``<``, ``>``, ``&``, ``'``.
+
+    The returned string is safe to render in HTML documents and
+    ``<script>`` tags. The exception is in HTML attributes that are
+    double quoted; either use single quotes or the ``|forceescape``
+    filter.
+
+    :param obj: The object to serialize to JSON.
+    :param dumps: The ``dumps`` function to use. Defaults to
+        ``env.policies["json.dumps_function"]``, which defaults to
+        :func:`json.dumps`.
+    :param kwargs: Extra arguments to pass to ``dumps``. Merged onto
+        ``env.policies["json.dumps_kwargs"]``.
+
+    .. versionchanged:: 3.0
+        The ``dumper`` parameter is renamed to ``dumps``.
+
+    .. versionadded:: 2.9
+    """
+    if dumps is None:
+        dumps = json.dumps
+
+    return markupsafe.Markup(
+        dumps(obj, **kwargs)
+        .replace("<", "\\u003c")
+        .replace(">", "\\u003e")
+        .replace("&", "\\u0026")
+        .replace("'", "\\u0027")
+    )
+
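+# Editor's note (behavior sketch): the HTML-unsafe characters come back as
+# Unicode escapes inside the marked-safe string:
+#
+#     >>> htmlsafe_json_dumps({"msg": "<script>"})
+#     Markup('{"msg": "\\u003cscript\\u003e"}')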
+
+class Cycler:
+    """Cycle through values by yield them one at a time, then restarting
+    once the end is reached. Available as ``cycler`` in templates.
+
+    Similar to ``loop.cycle``, but can be used outside loops or across
+    multiple loops. For example, render a list of folders and files in a
+    list, alternately giving them "odd" and "even" classes.
+
+    .. code-block:: html+jinja
+
+        {% set row_class = cycler("odd", "even") %}
+        <ul class="browser">
+        {% for folder in folders %}
+          <li class="folder {{ row_class.next() }}">{{ folder }}
+        {% endfor %}
+        {% for file in files %}
+          <li class="file {{ row_class.next() }}">{{ file }}
+        {% endfor %}
+        </ul>
+
+    :param items: Each positional argument will be yielded in the order
+        given for each cycle.
+
+    .. versionadded:: 2.1
+    """
+
+    def __init__(self, *items: t.Any) -> None:
+        if not items:
+            raise RuntimeError("at least one item has to be provided")
+        self.items = items
+        self.pos = 0
+
+    def reset(self) -> None:
+        """Resets the current item to the first item."""
+        self.pos = 0
+
+    @property
+    def current(self) -> t.Any:
+        """Return the current item. Equivalent to the item that will be
+        returned next time :meth:`next` is called.
+        """
+        return self.items[self.pos]
+
+    def next(self) -> t.Any:
+        """Return the current item, then advance :attr:`current` to the
+        next item.
+        """
+        rv = self.current
+        self.pos = (self.pos + 1) % len(self.items)
+        return rv
+
+    __next__ = next
+
+
+class Joiner:
+    """A joining helper for templates."""
+
+    def __init__(self, sep: str = ", ") -> None:
+        self.sep = sep
+        self.used = False
+
+    def __call__(self) -> str:
+        if not self.used:
+            self.used = True
+            return ""
+        return self.sep
+
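+# Editor's note (template usage sketch; ``joiner`` is the template-global
+# name for this class): nothing is emitted on the first call, the separator
+# on every later one:
+#
+#     {% set pipe = joiner("|") %}
+#     {% for tag in tags %}{{ pipe() }}{{ tag }}{% endfor %}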
+
+class Namespace:
+    """A namespace object that can hold arbitrary attributes.  It may be
+    initialized from a dictionary or with keyword arguments."""
+
+    def __init__(*args: t.Any, **kwargs: t.Any) -> None:  # noqa: B902
+        self, args = args[0], args[1:]
+        self.__attrs = dict(*args, **kwargs)
+
+    def __getattribute__(self, name: str) -> t.Any:
+        # __class__ is needed for the awaitable check in async mode
+        if name in {"_Namespace__attrs", "__class__"}:
+            return object.__getattribute__(self, name)
+        try:
+            return self.__attrs[name]
+        except KeyError:
+            raise AttributeError(name) from None
+
+    def __setitem__(self, name: str, value: t.Any) -> None:
+        self.__attrs[name] = value
+
+    def __repr__(self) -> str:
+        return f"<Namespace {self.__attrs!r}>"
diff --git a/venv/lib/python3.9/site-packages/jinja2/visitor.py b/venv/lib/python3.9/site-packages/jinja2/visitor.py
new file mode 100644
index 0000000..17c6aab
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/jinja2/visitor.py
@@ -0,0 +1,92 @@
+"""API for traversing the AST nodes. Implemented by the compiler and
+meta introspection.
+"""
+import typing as t
+
+from .nodes import Node
+
+if t.TYPE_CHECKING:
+    import typing_extensions as te
+
+    class VisitCallable(te.Protocol):
+        def __call__(self, node: Node, *args: t.Any, **kwargs: t.Any) -> t.Any:
+            ...
+
+
+class NodeVisitor:
+    """Walks the abstract syntax tree and call visitor functions for every
+    node found.  The visitor functions may return values which will be
+    forwarded by the `visit` method.
+
+    By default the visitor functions for the nodes are ``'visit_'`` +
+    class name of the node.  So a `TryFinally` node visit function would
+    be `visit_TryFinally`.  This behavior can be changed by overriding
+    the `get_visitor` function.  If no visitor function exists for a node
+    (return value `None`) the `generic_visit` visitor is used instead.
+    """
+
+    def get_visitor(self, node: Node) -> "t.Optional[VisitCallable]":
+        """Return the visitor function for this node or `None` if no visitor
+        exists for this node.  In that case the generic visit function is
+        used instead.
+        """
+        return getattr(self, f"visit_{type(node).__name__}", None)
+
+    def visit(self, node: Node, *args: t.Any, **kwargs: t.Any) -> t.Any:
+        """Visit a node."""
+        f = self.get_visitor(node)
+
+        if f is not None:
+            return f(node, *args, **kwargs)
+
+        return self.generic_visit(node, *args, **kwargs)
+
+    def generic_visit(self, node: Node, *args: t.Any, **kwargs: t.Any) -> t.Any:
+        """Called if no explicit visitor function exists for a node."""
+        for child_node in node.iter_child_nodes():
+            self.visit(child_node, *args, **kwargs)
+
+
+class NodeTransformer(NodeVisitor):
+    """Walks the abstract syntax tree and allows modifications of nodes.
+
+    The `NodeTransformer` will walk the AST and use the return value of the
+    visitor functions to replace or remove the old node.  If the return
+    value of the visitor function is `None` the node will be removed
+    from the previous location otherwise it's replaced with the return
+    value.  The return value may be the original node in which case no
+    replacement takes place.
+    """
+
+    def generic_visit(self, node: Node, *args: t.Any, **kwargs: t.Any) -> Node:
+        for field, old_value in node.iter_fields():
+            if isinstance(old_value, list):
+                new_values = []
+                for value in old_value:
+                    if isinstance(value, Node):
+                        value = self.visit(value, *args, **kwargs)
+                        if value is None:
+                            continue
+                        elif not isinstance(value, Node):
+                            new_values.extend(value)
+                            continue
+                    new_values.append(value)
+                old_value[:] = new_values
+            elif isinstance(old_value, Node):
+                new_node = self.visit(old_value, *args, **kwargs)
+                if new_node is None:
+                    delattr(node, field)
+                else:
+                    setattr(node, field, new_node)
+        return node
+
+    def visit_list(self, node: Node, *args: t.Any, **kwargs: t.Any) -> t.List[Node]:
+        """As transformers may return lists in some places this method
+        can be used to enforce a list as return value.
+        """
+        rv = self.visit(node, *args, **kwargs)
+
+        if not isinstance(rv, list):
+            return [rv]
+
+        return rv
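+
+
+# Editor's sketch (hypothetical transformer, not upstream code): constant
+# string nodes are rewritten in place and returned to stay in the tree:
+#
+#     from jinja2 import nodes
+#
+#     class LowercaseConsts(NodeTransformer):
+#         def visit_Const(self, node):
+#             if isinstance(node.value, str):
+#                 node.value = node.value.lower()
+#             return node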
diff --git a/venv/lib/python3.9/site-packages/markupsafe/__init__.py b/venv/lib/python3.9/site-packages/markupsafe/__init__.py
new file mode 100644
index 0000000..0f1c4f4
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/markupsafe/__init__.py
@@ -0,0 +1,295 @@
+import functools
+import re
+import string
+import typing as t
+
+if t.TYPE_CHECKING:
+    import typing_extensions as te
+
+    class HasHTML(te.Protocol):
+        def __html__(self) -> str:
+            pass
+
+
+__version__ = "2.1.1"
+
+_strip_comments_re = re.compile(r"<!--.*?-->")
+_strip_tags_re = re.compile(r"<.*?>")
+
+
+def _simple_escaping_wrapper(name: str) -> t.Callable[..., "Markup"]:
+    orig = getattr(str, name)
+
+    @functools.wraps(orig)
+    def wrapped(self: "Markup", *args: t.Any, **kwargs: t.Any) -> "Markup":
+        args = _escape_argspec(list(args), enumerate(args), self.escape)  # type: ignore
+        _escape_argspec(kwargs, kwargs.items(), self.escape)
+        return self.__class__(orig(self, *args, **kwargs))
+
+    return wrapped
+
+
+class Markup(str):
+    """A string that is ready to be safely inserted into an HTML or XML
+    document, either because it was escaped or because it was marked
+    safe.
+
+    Passing an object to the constructor converts it to text and wraps
+    it to mark it safe without escaping. To escape the text, use the
+    :meth:`escape` class method instead.
+
+    >>> Markup("Hello, <em>World</em>!")
+    Markup('Hello, <em>World</em>!')
+    >>> Markup(42)
+    Markup('42')
+    >>> Markup.escape("Hello, <em>World</em>!")
+    Markup('Hello, &lt;em&gt;World&lt;/em&gt;!')
+
+    This implements the ``__html__()`` interface that some frameworks
+    use. Passing an object that implements ``__html__()`` will wrap the
+    output of that method, marking it safe.
+
+    >>> class Foo:
+    ...     def __html__(self):
+    ...         return '<a href="/foo">foo</a>'
+    ...
+    >>> Markup(Foo())
+    Markup('<a href="/foo">foo</a>')
+
+    This is a subclass of :class:`str`. It has the same methods, but
+    escapes their arguments and returns a ``Markup`` instance.
+
+    >>> Markup("<em>%s</em>") % ("foo & bar",)
+    Markup('<em>foo &amp; bar</em>')
+    >>> Markup("<em>Hello</em> ") + "<foo>"
+    Markup('<em>Hello</em> &lt;foo&gt;')
+    """
+
+    __slots__ = ()
+
+    def __new__(
+        cls, base: t.Any = "", encoding: t.Optional[str] = None, errors: str = "strict"
+    ) -> "Markup":
+        if hasattr(base, "__html__"):
+            base = base.__html__()
+
+        if encoding is None:
+            return super().__new__(cls, base)
+
+        return super().__new__(cls, base, encoding, errors)
+
+    def __html__(self) -> "Markup":
+        return self
+
+    def __add__(self, other: t.Union[str, "HasHTML"]) -> "Markup":
+        if isinstance(other, str) or hasattr(other, "__html__"):
+            return self.__class__(super().__add__(self.escape(other)))
+
+        return NotImplemented
+
+    def __radd__(self, other: t.Union[str, "HasHTML"]) -> "Markup":
+        if isinstance(other, str) or hasattr(other, "__html__"):
+            return self.escape(other).__add__(self)
+
+        return NotImplemented
+
+    def __mul__(self, num: "te.SupportsIndex") -> "Markup":
+        if isinstance(num, int):
+            return self.__class__(super().__mul__(num))
+
+        return NotImplemented
+
+    __rmul__ = __mul__
+
+    def __mod__(self, arg: t.Any) -> "Markup":
+        if isinstance(arg, tuple):
+            # a tuple of arguments, each wrapped
+            arg = tuple(_MarkupEscapeHelper(x, self.escape) for x in arg)
+        elif hasattr(type(arg), "__getitem__") and not isinstance(arg, str):
+            # a mapping of arguments, wrapped
+            arg = _MarkupEscapeHelper(arg, self.escape)
+        else:
+            # a single argument, wrapped with the helper and a tuple
+            arg = (_MarkupEscapeHelper(arg, self.escape),)
+
+        return self.__class__(super().__mod__(arg))
+
+    def __repr__(self) -> str:
+        return f"{self.__class__.__name__}({super().__repr__()})"
+
+    def join(self, seq: t.Iterable[t.Union[str, "HasHTML"]]) -> "Markup":
+        return self.__class__(super().join(map(self.escape, seq)))
+
+    join.__doc__ = str.join.__doc__
+
+    def split(  # type: ignore
+        self, sep: t.Optional[str] = None, maxsplit: int = -1
+    ) -> t.List["Markup"]:
+        return [self.__class__(v) for v in super().split(sep, maxsplit)]
+
+    split.__doc__ = str.split.__doc__
+
+    def rsplit(  # type: ignore
+        self, sep: t.Optional[str] = None, maxsplit: int = -1
+    ) -> t.List["Markup"]:
+        return [self.__class__(v) for v in super().rsplit(sep, maxsplit)]
+
+    rsplit.__doc__ = str.rsplit.__doc__
+
+    def splitlines(self, keepends: bool = False) -> t.List["Markup"]:  # type: ignore
+        return [self.__class__(v) for v in super().splitlines(keepends)]
+
+    splitlines.__doc__ = str.splitlines.__doc__
+
+    def unescape(self) -> str:
+        """Convert escaped markup back into a text string. This replaces
+        HTML entities with the characters they represent.
+
+        >>> Markup("Main &raquo; <em>About</em>").unescape()
+        'Main » <em>About</em>'
+        """
+        from html import unescape
+
+        return unescape(str(self))
+
+    def striptags(self) -> str:
+        """:meth:`unescape` the markup, remove tags, and normalize
+        whitespace to single spaces.
+
+        >>> Markup("Main &raquo;\t<em>About</em>").striptags()
+        'Main » About'
+        """
+        # Use two regexes to avoid ambiguous matches.
+        value = _strip_comments_re.sub("", self)
+        value = _strip_tags_re.sub("", value)
+        value = " ".join(value.split())
+        return Markup(value).unescape()
+
+    @classmethod
+    def escape(cls, s: t.Any) -> "Markup":
+        """Escape a string. Calls :func:`escape` and ensures that for
+        subclasses the correct type is returned.
+        """
+        rv = escape(s)
+
+        if rv.__class__ is not cls:
+            return cls(rv)
+
+        return rv
+
+    for method in (
+        "__getitem__",
+        "capitalize",
+        "title",
+        "lower",
+        "upper",
+        "replace",
+        "ljust",
+        "rjust",
+        "lstrip",
+        "rstrip",
+        "center",
+        "strip",
+        "translate",
+        "expandtabs",
+        "swapcase",
+        "zfill",
+    ):
+        locals()[method] = _simple_escaping_wrapper(method)
+
+    del method
+
+    def partition(self, sep: str) -> t.Tuple["Markup", "Markup", "Markup"]:
+        l, s, r = super().partition(self.escape(sep))
+        cls = self.__class__
+        return cls(l), cls(s), cls(r)
+
+    def rpartition(self, sep: str) -> t.Tuple["Markup", "Markup", "Markup"]:
+        l, s, r = super().rpartition(self.escape(sep))
+        cls = self.__class__
+        return cls(l), cls(s), cls(r)
+
+    def format(self, *args: t.Any, **kwargs: t.Any) -> "Markup":
+        formatter = EscapeFormatter(self.escape)
+        return self.__class__(formatter.vformat(self, args, kwargs))
+
+    def __html_format__(self, format_spec: str) -> "Markup":
+        if format_spec:
+            raise ValueError("Unsupported format specification for Markup.")
+
+        return self
+
+
+class EscapeFormatter(string.Formatter):
+    __slots__ = ("escape",)
+
+    def __init__(self, escape: t.Callable[[t.Any], Markup]) -> None:
+        self.escape = escape
+        super().__init__()
+
+    def format_field(self, value: t.Any, format_spec: str) -> str:
+        if hasattr(value, "__html_format__"):
+            rv = value.__html_format__(format_spec)
+        elif hasattr(value, "__html__"):
+            if format_spec:
+                raise ValueError(
+                    f"Format specifier {format_spec} given, but {type(value)} does not"
+                    " define __html_format__. A class that defines __html__ must define"
+                    " __html_format__ to work with format specifiers."
+                )
+            rv = value.__html__()
+        else:
+            # We need to make sure the format spec is str here as
+            # otherwise the wrong callback methods are invoked.
+            rv = string.Formatter.format_field(self, value, str(format_spec))
+        return str(self.escape(rv))
+
+
+_ListOrDict = t.TypeVar("_ListOrDict", list, dict)
+
+
+def _escape_argspec(
+    obj: _ListOrDict, iterable: t.Iterable[t.Any], escape: t.Callable[[t.Any], Markup]
+) -> _ListOrDict:
+    """Helper for various string-wrapped functions."""
+    for key, value in iterable:
+        if isinstance(value, str) or hasattr(value, "__html__"):
+            obj[key] = escape(value)
+
+    return obj
+
+
+class _MarkupEscapeHelper:
+    """Helper for :meth:`Markup.__mod__`."""
+
+    __slots__ = ("obj", "escape")
+
+    def __init__(self, obj: t.Any, escape: t.Callable[[t.Any], Markup]) -> None:
+        self.obj = obj
+        self.escape = escape
+
+    def __getitem__(self, item: t.Any) -> "_MarkupEscapeHelper":
+        return _MarkupEscapeHelper(self.obj[item], self.escape)
+
+    def __str__(self) -> str:
+        return str(self.escape(self.obj))
+
+    def __repr__(self) -> str:
+        return str(self.escape(repr(self.obj)))
+
+    def __int__(self) -> int:
+        return int(self.obj)
+
+    def __float__(self) -> float:
+        return float(self.obj)
+
+
+# circular import
+try:
+    from ._speedups import escape as escape
+    from ._speedups import escape_silent as escape_silent
+    from ._speedups import soft_str as soft_str
+except ImportError:
+    from ._native import escape as escape
+    from ._native import escape_silent as escape_silent  # noqa: F401
+    from ._native import soft_str as soft_str  # noqa: F401
diff --git a/venv/lib/python3.9/site-packages/markupsafe/__pycache__/__init__.cpython-39.pyc b/venv/lib/python3.9/site-packages/markupsafe/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..4f2dbb2
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/markupsafe/__pycache__/__init__.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/markupsafe/__pycache__/_native.cpython-39.pyc b/venv/lib/python3.9/site-packages/markupsafe/__pycache__/_native.cpython-39.pyc
new file mode 100644
index 0000000..e0ed905
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/markupsafe/__pycache__/_native.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/markupsafe/_native.py b/venv/lib/python3.9/site-packages/markupsafe/_native.py
new file mode 100644
index 0000000..8117b27
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/markupsafe/_native.py
@@ -0,0 +1,63 @@
+import typing as t
+
+from . import Markup
+
+
+def escape(s: t.Any) -> Markup:
+    """Replace the characters ``&``, ``<``, ``>``, ``'``, and ``"`` in
+    the string with HTML-safe sequences. Use this if you need to display
+    text that might contain such characters in HTML.
+
+    If the object has an ``__html__`` method, it is called and the
+    return value is assumed to already be safe for HTML.
+
+    :param s: An object to be converted to a string and escaped.
+    :return: A :class:`Markup` string with the escaped text.
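+
+    >>> escape("<em>Hi</em>")
+    Markup('&lt;em&gt;Hi&lt;/em&gt;')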
+    """
+    if hasattr(s, "__html__"):
+        return Markup(s.__html__())
+
+    return Markup(
+        str(s)
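+        # "&" must be replaced first so that the "&" in the entities
+        # added below is not itself re-escaped.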
+        .replace("&", "&amp;")
+        .replace(">", "&gt;")
+        .replace("<", "&lt;")
+        .replace("'", "&#39;")
+        .replace('"', "&#34;")
+    )
+
+
+def escape_silent(s: t.Optional[t.Any]) -> Markup:
+    """Like :func:`escape` but treats ``None`` as the empty string.
+    Useful with optional values, as otherwise you get the string
+    ``'None'`` when the value is ``None``.
+
+    >>> escape(None)
+    Markup('None')
+    >>> escape_silent(None)
+    Markup('')
+    """
+    if s is None:
+        return Markup()
+
+    return escape(s)
+
+
+def soft_str(s: t.Any) -> str:
+    """Convert an object to a string if it isn't already. This preserves
+    a :class:`Markup` string rather than converting it back to a basic
+    string, so it will still be marked as safe and won't be escaped
+    again.
+
+    >>> value = escape("<User 1>")
+    >>> value
+    Markup('&lt;User 1&gt;')
+    >>> escape(str(value))
+    Markup('&amp;lt;User 1&amp;gt;')
+    >>> escape(soft_str(value))
+    Markup('&lt;User 1&gt;')
+    """
+    if not isinstance(s, str):
+        return str(s)
+
+    return s
diff --git a/venv/lib/python3.9/site-packages/markupsafe/_speedups.c b/venv/lib/python3.9/site-packages/markupsafe/_speedups.c
new file mode 100644
index 0000000..3c463fb
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/markupsafe/_speedups.c
@@ -0,0 +1,320 @@
+#include <Python.h>
+
+static PyObject* markup;
+
+static int
+init_constants(void)
+{
+	PyObject *module;
+
+	/* import markup type so that we can mark the return value */
+	module = PyImport_ImportModule("markupsafe");
+	if (!module)
+		return 0;
+	markup = PyObject_GetAttrString(module, "Markup");
+	Py_DECREF(module);
+
+	return 1;
+}
+
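+/* Escaping is done in two passes over the string.  Pass one (GET_DELTA)
+ * only counts how many extra characters the escaped output will need:
+ * "&#34;", "&#39;" and "&amp;" each add 4, "&lt;" and "&gt;" add 3. */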
+#define GET_DELTA(inp, inp_end, delta) \
+	while (inp < inp_end) { \
+		switch (*inp++) { \
+		case '"': \
+		case '\'': \
+		case '&': \
+			delta += 4; \
+			break; \
+		case '<': \
+		case '>': \
+			delta += 3; \
+			break; \
+		} \
+	}
+
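+/* Pass two (DO_ESCAPE) fills the exactly pre-sized output buffer: runs of
+ * characters that need no escaping are copied in bulk with memcpy, and each
+ * special character is spliced in as its HTML entity. */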
+#define DO_ESCAPE(inp, inp_end, outp) \
+	{ \
+		Py_ssize_t ncopy = 0; \
+		while (inp < inp_end) { \
+			switch (*inp) { \
+			case '"': \
+				memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \
+				outp += ncopy; ncopy = 0; \
+				*outp++ = '&'; \
+				*outp++ = '#'; \
+				*outp++ = '3'; \
+				*outp++ = '4'; \
+				*outp++ = ';'; \
+				break; \
+			case '\'': \
+				memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \
+				outp += ncopy; ncopy = 0; \
+				*outp++ = '&'; \
+				*outp++ = '#'; \
+				*outp++ = '3'; \
+				*outp++ = '9'; \
+				*outp++ = ';'; \
+				break; \
+			case '&': \
+				memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \
+				outp += ncopy; ncopy = 0; \
+				*outp++ = '&'; \
+				*outp++ = 'a'; \
+				*outp++ = 'm'; \
+				*outp++ = 'p'; \
+				*outp++ = ';'; \
+				break; \
+			case '<': \
+				memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \
+				outp += ncopy; ncopy = 0; \
+				*outp++ = '&'; \
+				*outp++ = 'l'; \
+				*outp++ = 't'; \
+				*outp++ = ';'; \
+				break; \
+			case '>': \
+				memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \
+				outp += ncopy; ncopy = 0; \
+				*outp++ = '&'; \
+				*outp++ = 'g'; \
+				*outp++ = 't'; \
+				*outp++ = ';'; \
+				break; \
+			default: \
+				ncopy++; \
+			} \
+			inp++; \
+		} \
+		memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \
+	}
+
+static PyObject*
+escape_unicode_kind1(PyUnicodeObject *in)
+{
+	Py_UCS1 *inp = PyUnicode_1BYTE_DATA(in);
+	Py_UCS1 *inp_end = inp + PyUnicode_GET_LENGTH(in);
+	Py_UCS1 *outp;
+	PyObject *out;
+	Py_ssize_t delta = 0;
+
+	GET_DELTA(inp, inp_end, delta);
+	if (!delta) {
+		Py_INCREF(in);
+		return (PyObject*)in;
+	}
+
+	out = PyUnicode_New(PyUnicode_GET_LENGTH(in) + delta,
+						PyUnicode_IS_ASCII(in) ? 127 : 255);
+	if (!out)
+		return NULL;
+
+	inp = PyUnicode_1BYTE_DATA(in);
+	outp = PyUnicode_1BYTE_DATA(out);
+	DO_ESCAPE(inp, inp_end, outp);
+	return out;
+}
+
+static PyObject*
+escape_unicode_kind2(PyUnicodeObject *in)
+{
+	Py_UCS2 *inp = PyUnicode_2BYTE_DATA(in);
+	Py_UCS2 *inp_end = inp + PyUnicode_GET_LENGTH(in);
+	Py_UCS2 *outp;
+	PyObject *out;
+	Py_ssize_t delta = 0;
+
+	GET_DELTA(inp, inp_end, delta);
+	if (!delta) {
+		Py_INCREF(in);
+		return (PyObject*)in;
+	}
+
+	out = PyUnicode_New(PyUnicode_GET_LENGTH(in) + delta, 65535);
+	if (!out)
+		return NULL;
+
+	inp = PyUnicode_2BYTE_DATA(in);
+	outp = PyUnicode_2BYTE_DATA(out);
+	DO_ESCAPE(inp, inp_end, outp);
+	return out;
+}
+
+
+static PyObject*
+escape_unicode_kind4(PyUnicodeObject *in)
+{
+	Py_UCS4 *inp = PyUnicode_4BYTE_DATA(in);
+	Py_UCS4 *inp_end = inp + PyUnicode_GET_LENGTH(in);
+	Py_UCS4 *outp;
+	PyObject *out;
+	Py_ssize_t delta = 0;
+
+	GET_DELTA(inp, inp_end, delta);
+	if (!delta) {
+		Py_INCREF(in);
+		return (PyObject*)in;
+	}
+
+	out = PyUnicode_New(PyUnicode_GET_LENGTH(in) + delta, 1114111);
+	if (!out)
+		return NULL;
+
+	inp = PyUnicode_4BYTE_DATA(in);
+	outp = PyUnicode_4BYTE_DATA(out);
+	DO_ESCAPE(inp, inp_end, outp);
+	return out;
+}
+
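+/* CPython (PEP 393) stores a string with 1, 2 or 4 bytes per code point
+ * depending on its widest character; escape_unicode below dispatches to the
+ * matching fixed-width copy loop.  The maxchar arguments (127/255, 65535,
+ * 1114111) keep the result in the same internal representation. */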
+static PyObject*
+escape_unicode(PyUnicodeObject *in)
+{
+	if (PyUnicode_READY(in))
+		return NULL;
+
+	switch (PyUnicode_KIND(in)) {
+	case PyUnicode_1BYTE_KIND:
+		return escape_unicode_kind1(in);
+	case PyUnicode_2BYTE_KIND:
+		return escape_unicode_kind2(in);
+	case PyUnicode_4BYTE_KIND:
+		return escape_unicode_kind4(in);
+	}
+	assert(0);  /* shouldn't happen */
+	return NULL;
+}
+
+static PyObject*
+escape(PyObject *self, PyObject *text)
+{
+	static PyObject *id_html;
+	PyObject *s = NULL, *rv = NULL, *html;
+
+	if (id_html == NULL) {
+		id_html = PyUnicode_InternFromString("__html__");
+		if (id_html == NULL) {
+			return NULL;
+		}
+	}
+
+	/* we don't have to escape integers, bools, floats or None */
+	if (PyLong_CheckExact(text) ||
+		PyFloat_CheckExact(text) || PyBool_Check(text) ||
+		text == Py_None)
+		return PyObject_CallFunctionObjArgs(markup, text, NULL);
+
+	/* if the object has an __html__ method, it performs the escaping */
+	html = PyObject_GetAttr(text, id_html);
+	if (html) {
+		s = PyObject_CallObject(html, NULL);
+		Py_DECREF(html);
+		if (s == NULL) {
+			return NULL;
+		}
+		/* Convert to Markup object */
+		rv = PyObject_CallFunctionObjArgs(markup, (PyObject*)s, NULL);
+		Py_DECREF(s);
+		return rv;
+	}
+
+	/* otherwise make the object unicode if it isn't, then escape */
+	PyErr_Clear();
+	if (!PyUnicode_Check(text)) {
+		PyObject *unicode = PyObject_Str(text);
+		if (!unicode)
+			return NULL;
+		s = escape_unicode((PyUnicodeObject*)unicode);
+		Py_DECREF(unicode);
+	}
+	else
+		s = escape_unicode((PyUnicodeObject*)text);
+
+	/* escape_unicode returns NULL on failure (e.g. an allocation error);
+	   propagate the error instead of calling Py_DECREF on NULL below. */
+	if (s == NULL)
+		return NULL;
+
+	/* convert the unicode string into a markup object. */
+	rv = PyObject_CallFunctionObjArgs(markup, (PyObject*)s, NULL);
+	Py_DECREF(s);
+	return rv;
+}
+
+
+static PyObject*
+escape_silent(PyObject *self, PyObject *text)
+{
+	if (text != Py_None)
+		return escape(self, text);
+	return PyObject_CallFunctionObjArgs(markup, NULL);
+}
+
+
+static PyObject*
+soft_str(PyObject *self, PyObject *s)
+{
+	if (!PyUnicode_Check(s))
+		return PyObject_Str(s);
+	Py_INCREF(s);
+	return s;
+}
+
+
+static PyMethodDef module_methods[] = {
+	{
+		"escape",
+		(PyCFunction)escape,
+		METH_O,
+		"Replace the characters ``&``, ``<``, ``>``, ``'``, and ``\"`` in"
+		" the string with HTML-safe sequences. Use this if you need to display"
+		" text that might contain such characters in HTML.\n\n"
+		"If the object has an ``__html__`` method, it is called and the"
+		" return value is assumed to already be safe for HTML.\n\n"
+		":param s: An object to be converted to a string and escaped.\n"
+		":return: A :class:`Markup` string with the escaped text.\n"
+	},
+	{
+		"escape_silent",
+		(PyCFunction)escape_silent,
+		METH_O,
+		"Like :func:`escape` but treats ``None`` as the empty string."
+		" Useful with optional values, as otherwise you get the string"
+		" ``'None'`` when the value is ``None``.\n\n"
+		">>> escape(None)\n"
+		"Markup('None')\n"
+		">>> escape_silent(None)\n"
+		"Markup('')\n"
+	},
+	{
+		"soft_str",
+		(PyCFunction)soft_str,
+		METH_O,
+		"Convert an object to a string if it isn't already. This preserves"
+		" a :class:`Markup` string rather than converting it back to a basic"
+		" string, so it will still be marked as safe and won't be escaped"
+		" again.\n\n"
+		">>> value = escape(\"<User 1>\")\n"
+		">>> value\n"
+		"Markup('&lt;User 1&gt;')\n"
+		">>> escape(str(value))\n"
+		"Markup('&amp;lt;User 1&amp;gt;')\n"
+		">>> escape(soft_str(value))\n"
+		"Markup('&lt;User 1&gt;')\n"
+	},
+	{NULL, NULL, 0, NULL}  /* Sentinel */
+};
+
+static struct PyModuleDef module_definition = {
+	PyModuleDef_HEAD_INIT,
+	"markupsafe._speedups",
+	NULL,
+	-1,
+	module_methods,
+	NULL,
+	NULL,
+	NULL,
+	NULL
+};
+
+PyMODINIT_FUNC
+PyInit__speedups(void)
+{
+	if (!init_constants())
+		return NULL;
+
+	return PyModule_Create(&module_definition);
+}
diff --git a/venv/lib/python3.9/site-packages/markupsafe/_speedups.cpython-39-x86_64-linux-gnu.so b/venv/lib/python3.9/site-packages/markupsafe/_speedups.cpython-39-x86_64-linux-gnu.so
new file mode 100755
index 0000000..7c79765
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/markupsafe/_speedups.cpython-39-x86_64-linux-gnu.so
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/markupsafe/_speedups.pyi b/venv/lib/python3.9/site-packages/markupsafe/_speedups.pyi
new file mode 100644
index 0000000..f673240
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/markupsafe/_speedups.pyi
@@ -0,0 +1,9 @@
+from typing import Any
+from typing import Optional
+
+from . import Markup
+
+def escape(s: Any) -> Markup: ...
+def escape_silent(s: Optional[Any]) -> Markup: ...
+def soft_str(s: Any) -> str: ...
+def soft_unicode(s: Any) -> str: ...
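+
+# Note: soft_unicode is assumed here to be the deprecated pre-2.0 alias of
+# soft_str, kept in the stub so that older callers still type-check.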
diff --git a/venv/lib/python3.9/site-packages/markupsafe/py.typed b/venv/lib/python3.9/site-packages/markupsafe/py.typed
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/markupsafe/py.typed
diff --git a/venv/lib/python3.9/site-packages/pip-22.3.1.dist-info/INSTALLER b/venv/lib/python3.9/site-packages/pip-22.3.1.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip-22.3.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/lib/python3.9/site-packages/pip-22.3.1.dist-info/LICENSE.txt b/venv/lib/python3.9/site-packages/pip-22.3.1.dist-info/LICENSE.txt
new file mode 100644
index 0000000..8e7b65e
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip-22.3.1.dist-info/LICENSE.txt
@@ -0,0 +1,20 @@
+Copyright (c) 2008-present The pip developers (see AUTHORS.txt file)
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/venv/lib/python3.9/site-packages/pip-22.3.1.dist-info/METADATA b/venv/lib/python3.9/site-packages/pip-22.3.1.dist-info/METADATA
new file mode 100644
index 0000000..e935e1a
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip-22.3.1.dist-info/METADATA
@@ -0,0 +1,88 @@
+Metadata-Version: 2.1
+Name: pip
+Version: 22.3.1
+Summary: The PyPA recommended tool for installing Python packages.
+Home-page: https://pip.pypa.io/
+Author: The pip developers
+Author-email: distutils-sig@python.org
+License: MIT
+Project-URL: Documentation, https://pip.pypa.io
+Project-URL: Source, https://github.com/pypa/pip
+Project-URL: Changelog, https://pip.pypa.io/en/stable/news/
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Topic :: Software Development :: Build Tools
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Requires-Python: >=3.7
+License-File: LICENSE.txt
+
+pip - The Python Package Installer
+==================================
+
+.. image:: https://img.shields.io/pypi/v/pip.svg
+   :target: https://pypi.org/project/pip/
+
+.. image:: https://readthedocs.org/projects/pip/badge/?version=latest
+   :target: https://pip.pypa.io/en/latest
+
+pip is the `package installer`_ for Python. You can use pip to install packages from the `Python Package Index`_ and other indexes.
+
+Please take a look at our documentation for how to install and use pip:
+
+* `Installation`_
+* `Usage`_
+
+We release updates regularly, with a new version every 3 months. Find more details in our documentation:
+
+* `Release notes`_
+* `Release process`_
+
+In pip 20.3, we've `made a big improvement to the heart of pip`_; `learn more`_. We want your input, so `sign up for our user experience research studies`_ to help us do it right.
+
+**Note**: pip 21.0, in January 2021, removed Python 2 support, per pip's `Python 2 support policy`_. Please migrate to Python 3.
+
+If you find bugs, need help, or want to talk to the developers, please use our mailing lists or chat rooms:
+
+* `Issue tracking`_
+* `Discourse channel`_
+* `User IRC`_
+
+If you want to get involved, head over to GitHub to get the source code, look at our development documentation, and feel free to jump on the developer mailing lists and chat rooms:
+
+* `GitHub page`_
+* `Development documentation`_
+* `Development IRC`_
+
+Code of Conduct
+---------------
+
+Everyone interacting in the pip project's codebases, issue trackers, chat
+rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_.
+
+.. _package installer: https://packaging.python.org/guides/tool-recommendations/
+.. _Python Package Index: https://pypi.org
+.. _Installation: https://pip.pypa.io/en/stable/installation/
+.. _Usage: https://pip.pypa.io/en/stable/
+.. _Release notes: https://pip.pypa.io/en/stable/news.html
+.. _Release process: https://pip.pypa.io/en/latest/development/release-process/
+.. _GitHub page: https://github.com/pypa/pip
+.. _Development documentation: https://pip.pypa.io/en/latest/development
+.. _made a big improvement to the heart of pip: https://pyfound.blogspot.com/2020/11/pip-20-3-new-resolver.html
+.. _learn more: https://pip.pypa.io/en/latest/user_guide/#changes-to-the-pip-dependency-resolver-in-20-3-2020
+.. _sign up for our user experience research studies: https://pyfound.blogspot.com/2020/03/new-pip-resolver-to-roll-out-this-year.html
+.. _Python 2 support policy: https://pip.pypa.io/en/latest/development/release-process/#python-2-support
+.. _Issue tracking: https://github.com/pypa/pip/issues
+.. _Discourse channel: https://discuss.python.org/c/packaging
+.. _User IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa
+.. _Development IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa-dev
+.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md
diff --git a/venv/lib/python3.9/site-packages/pip-22.3.1.dist-info/RECORD b/venv/lib/python3.9/site-packages/pip-22.3.1.dist-info/RECORD
new file mode 100644
index 0000000..ba0a0f2
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip-22.3.1.dist-info/RECORD
@@ -0,0 +1,993 @@
+../../../bin/pip,sha256=cKDI043nZX9pbtstqE5pQNod_739MbNN9OzsTI0Uo08,246

+../../../bin/pip3,sha256=cKDI043nZX9pbtstqE5pQNod_739MbNN9OzsTI0Uo08,246

+../../../bin/pip3.10,sha256=cKDI043nZX9pbtstqE5pQNod_739MbNN9OzsTI0Uo08,246

+../../../bin/pip3.9,sha256=cKDI043nZX9pbtstqE5pQNod_739MbNN9OzsTI0Uo08,246

+pip-22.3.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4

+pip-22.3.1.dist-info/LICENSE.txt,sha256=Y0MApmnUmurmWxLGxIySTFGkzfPR_whtw0VtyLyqIQQ,1093

+pip-22.3.1.dist-info/METADATA,sha256=a9COYc5qzklDgbGlrKYkypMXon4A6IDgpeUTWLr7zzY,4072

+pip-22.3.1.dist-info/RECORD,,

+pip-22.3.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0

+pip-22.3.1.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92

+pip-22.3.1.dist-info/entry_points.txt,sha256=ynZN1_707_L23Oa8_O5LOxEoccj1nDa4xHT5galfN7o,125

+pip-22.3.1.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4

+pip/__init__.py,sha256=Z2hXGRMvmdhpmmqr0OW1fA2Jje8tnmU0uzibRoUF-w8,357

+pip/__main__.py,sha256=mXwWDftNLMKfwVqKFWGE_uuBZvGSIiUELhLkeysIuZc,1198

+pip/__pip-runner__.py,sha256=EnrfKmKMzWAdqg_JicLCOP9Y95Ux7zHh4ObvqLtQcjo,1444

+pip/__pycache__/__init__.cpython-39.pyc,,

+pip/__pycache__/__main__.cpython-39.pyc,,

+pip/__pycache__/__pip-runner__.cpython-39.pyc,,

+pip/_internal/__init__.py,sha256=nnFCuxrPMgALrIDxSoy-H6Zj4W4UY60D-uL1aJyq0pc,573

+pip/_internal/__pycache__/__init__.cpython-39.pyc,,

+pip/_internal/__pycache__/build_env.cpython-39.pyc,,

+pip/_internal/__pycache__/cache.cpython-39.pyc,,

+pip/_internal/__pycache__/configuration.cpython-39.pyc,,

+pip/_internal/__pycache__/exceptions.cpython-39.pyc,,

+pip/_internal/__pycache__/main.cpython-39.pyc,,

+pip/_internal/__pycache__/pyproject.cpython-39.pyc,,

+pip/_internal/__pycache__/self_outdated_check.cpython-39.pyc,,

+pip/_internal/__pycache__/wheel_builder.cpython-39.pyc,,

+pip/_internal/build_env.py,sha256=gEAT8R6SuWbg2mcrsmOTKWMw_x5pedMzvSTxQS57JZs,10234

+pip/_internal/cache.py,sha256=C3n78VnBga9rjPXZqht_4A4d-T25poC7K0qBM7FHDhU,10734

+pip/_internal/cli/__init__.py,sha256=FkHBgpxxb-_gd6r1FjnNhfMOzAUYyXoXKJ6abijfcFU,132

+pip/_internal/cli/__pycache__/__init__.cpython-39.pyc,,

+pip/_internal/cli/__pycache__/autocompletion.cpython-39.pyc,,

+pip/_internal/cli/__pycache__/base_command.cpython-39.pyc,,

+pip/_internal/cli/__pycache__/cmdoptions.cpython-39.pyc,,

+pip/_internal/cli/__pycache__/command_context.cpython-39.pyc,,

+pip/_internal/cli/__pycache__/main.cpython-39.pyc,,

+pip/_internal/cli/__pycache__/main_parser.cpython-39.pyc,,

+pip/_internal/cli/__pycache__/parser.cpython-39.pyc,,

+pip/_internal/cli/__pycache__/progress_bars.cpython-39.pyc,,

+pip/_internal/cli/__pycache__/req_command.cpython-39.pyc,,

+pip/_internal/cli/__pycache__/spinners.cpython-39.pyc,,

+pip/_internal/cli/__pycache__/status_codes.cpython-39.pyc,,

+pip/_internal/cli/autocompletion.py,sha256=wY2JPZY2Eji1vhR7bVo-yCBPJ9LCy6P80iOAhZD1Vi8,6676

+pip/_internal/cli/base_command.py,sha256=t1D5x40Hfn9HnPnMt-iSxvqL14nht2olBCacW74pc-k,7842

+pip/_internal/cli/cmdoptions.py,sha256=Jlarlzz9qv9tC_tCaEbcc_jVvrPreFLBBUnDgoyWflw,29381

+pip/_internal/cli/command_context.py,sha256=RHgIPwtObh5KhMrd3YZTkl8zbVG-6Okml7YbFX4Ehg0,774

+pip/_internal/cli/main.py,sha256=ioJ8IVlb2K1qLOxR-tXkee9lURhYV89CDM71MKag7YY,2472

+pip/_internal/cli/main_parser.py,sha256=laDpsuBDl6kyfywp9eMMA9s84jfH2TJJn-vmL0GG90w,4338

+pip/_internal/cli/parser.py,sha256=tWP-K1uSxnJyXu3WE0kkH3niAYRBeuUaxeydhzOdhL4,10817

+pip/_internal/cli/progress_bars.py,sha256=So4mPoSjXkXiSHiTzzquH3VVyVD_njXlHJSExYPXAow,1968

+pip/_internal/cli/req_command.py,sha256=ypTutLv4j_efxC2f6C6aCQufxre-zaJdi5m_tWlLeBk,18172

+pip/_internal/cli/spinners.py,sha256=hIJ83GerdFgFCdobIA23Jggetegl_uC4Sp586nzFbPE,5118

+pip/_internal/cli/status_codes.py,sha256=sEFHUaUJbqv8iArL3HAtcztWZmGOFX01hTesSytDEh0,116

+pip/_internal/commands/__init__.py,sha256=5oRO9O3dM2vGuh0bFw4HOVletryrz5HHMmmPWwJrH9U,3882

+pip/_internal/commands/__pycache__/__init__.cpython-39.pyc,,

+pip/_internal/commands/__pycache__/cache.cpython-39.pyc,,

+pip/_internal/commands/__pycache__/check.cpython-39.pyc,,

+pip/_internal/commands/__pycache__/completion.cpython-39.pyc,,

+pip/_internal/commands/__pycache__/configuration.cpython-39.pyc,,

+pip/_internal/commands/__pycache__/debug.cpython-39.pyc,,

+pip/_internal/commands/__pycache__/download.cpython-39.pyc,,

+pip/_internal/commands/__pycache__/freeze.cpython-39.pyc,,

+pip/_internal/commands/__pycache__/hash.cpython-39.pyc,,

+pip/_internal/commands/__pycache__/help.cpython-39.pyc,,

+pip/_internal/commands/__pycache__/index.cpython-39.pyc,,

+pip/_internal/commands/__pycache__/inspect.cpython-39.pyc,,

+pip/_internal/commands/__pycache__/install.cpython-39.pyc,,

+pip/_internal/commands/__pycache__/list.cpython-39.pyc,,

+pip/_internal/commands/__pycache__/search.cpython-39.pyc,,

+pip/_internal/commands/__pycache__/show.cpython-39.pyc,,

+pip/_internal/commands/__pycache__/uninstall.cpython-39.pyc,,

+pip/_internal/commands/__pycache__/wheel.cpython-39.pyc,,

+pip/_internal/commands/cache.py,sha256=muaT0mbL-ZUpn6AaushVAipzTiMwE4nV2BLbJBwt_KQ,7582

+pip/_internal/commands/check.py,sha256=0gjXR7j36xJT5cs2heYU_dfOfpnFfzX8OoPNNoKhqdM,1685

+pip/_internal/commands/completion.py,sha256=H0TJvGrdsoleuIyQKzJbicLFppYx2OZA0BLNpQDeFjI,4129

+pip/_internal/commands/configuration.py,sha256=NB5uf8HIX8-li95YLoZO09nALIWlLCHDF5aifSKcBn8,9815

+pip/_internal/commands/debug.py,sha256=kVjn-O1ixLk0webD0w9vfFFq_GCTUTd2hmLOnYtDCig,6573

+pip/_internal/commands/download.py,sha256=LwKEyYMG2L67nQRyGo8hQdNEeMU2bmGWqJfcB8JDXas,5289

+pip/_internal/commands/freeze.py,sha256=gCjoD6foBZPBAAYx5t8zZLkJhsF_ZRtnb3dPuD7beO8,2951

+pip/_internal/commands/hash.py,sha256=EVVOuvGtoPEdFi8SNnmdqlCQrhCxV-kJsdwtdcCnXGQ,1703

+pip/_internal/commands/help.py,sha256=gcc6QDkcgHMOuAn5UxaZwAStsRBrnGSn_yxjS57JIoM,1132

+pip/_internal/commands/index.py,sha256=1VVXXj5MsI2qH-N7uniQQyVkg-KCn_RdjiyiUmkUS5U,4762

+pip/_internal/commands/inspect.py,sha256=mRJ9aIkBQN0IJ7Um8pzaxAzVPIgL8KfWHx1fWKJgUAQ,3374

+pip/_internal/commands/install.py,sha256=_XbW0PyxtZCMMNqo8mDaOq3TBRiJNFM-94CR27mburc,31726

+pip/_internal/commands/list.py,sha256=Fk1TSxB33NlRS4qlLQ0xwnytnF9-zkQJbKQYv2xc4Q4,12343

+pip/_internal/commands/search.py,sha256=sbBZiARRc050QquOKcCvOr2K3XLsoYebLKZGRi__iUI,5697

+pip/_internal/commands/show.py,sha256=CJI8q4SSY0X346K1hi4Th8Nbyhl4nxPTBJUuzOlTaYE,6129

+pip/_internal/commands/uninstall.py,sha256=0JQhifYxecNrJAwoILFwjm9V1V3liXzNT-y4bgRXXPw,3680

+pip/_internal/commands/wheel.py,sha256=mbFJd4dmUfrVFJkQbK8n2zHyRcD3AI91f7EUo9l3KYg,7396

+pip/_internal/configuration.py,sha256=uBKTus43pDIO6IzT2mLWQeROmHhtnoabhniKNjPYvD0,13529

+pip/_internal/distributions/__init__.py,sha256=Hq6kt6gXBgjNit5hTTWLAzeCNOKoB-N0pGYSqehrli8,858

+pip/_internal/distributions/__pycache__/__init__.cpython-39.pyc,,

+pip/_internal/distributions/__pycache__/base.cpython-39.pyc,,

+pip/_internal/distributions/__pycache__/installed.cpython-39.pyc,,

+pip/_internal/distributions/__pycache__/sdist.cpython-39.pyc,,

+pip/_internal/distributions/__pycache__/wheel.cpython-39.pyc,,

+pip/_internal/distributions/base.py,sha256=jrF1Vi7eGyqFqMHrieh1PIOrGU7KeCxhYPZnbvtmvGY,1221

+pip/_internal/distributions/installed.py,sha256=NI2OgsgH9iBq9l5vB-56vOg5YsybOy-AU4VE5CSCO2I,729

+pip/_internal/distributions/sdist.py,sha256=SQBdkatXSigKGG_SaD0U0p1Jwdfrg26UCNcHgkXZfdA,6494

+pip/_internal/distributions/wheel.py,sha256=m-J4XO-gvFerlYsFzzSXYDvrx8tLZlJFTCgDxctn8ig,1164

+pip/_internal/exceptions.py,sha256=BfvcyN2iEv3Sf00SVmSk59lEeZEBHELqkuoN2KeIWKc,20942

+pip/_internal/index/__init__.py,sha256=vpt-JeTZefh8a-FC22ZeBSXFVbuBcXSGiILhQZJaNpQ,30

+pip/_internal/index/__pycache__/__init__.cpython-39.pyc,,

+pip/_internal/index/__pycache__/collector.cpython-39.pyc,,

+pip/_internal/index/__pycache__/package_finder.cpython-39.pyc,,

+pip/_internal/index/__pycache__/sources.cpython-39.pyc,,

+pip/_internal/index/collector.py,sha256=Pb9FW9STH2lwaApCIdMCivsbPP5pSYQp5bh3nLQBkDU,16503

+pip/_internal/index/package_finder.py,sha256=kmcMu5_i-BP6v3NQGY0_am1ezxM2Gk4t00arZMmm4sc,37596

+pip/_internal/index/sources.py,sha256=SVyPitv08-Qalh2_Bk5diAJ9GAA_d-a93koouQodAG0,6557

+pip/_internal/locations/__init__.py,sha256=QhB-Y6TNyaU010cimm2T4wM5loe8oRdjLwJ6xmsGc-k,17552

+pip/_internal/locations/__pycache__/__init__.cpython-39.pyc,,

+pip/_internal/locations/__pycache__/_distutils.cpython-39.pyc,,

+pip/_internal/locations/__pycache__/_sysconfig.cpython-39.pyc,,

+pip/_internal/locations/__pycache__/base.cpython-39.pyc,,

+pip/_internal/locations/_distutils.py,sha256=wgHDvHGNZHtlcHkQjYovHzkEUBzisR0iOh7OqCIkB5g,6302

+pip/_internal/locations/_sysconfig.py,sha256=nM-DiVHXWTxippdmN0MGVl5r7OIfIMy3vgDMlo8c_oo,7867

+pip/_internal/locations/base.py,sha256=ufyDqPwZ4jLbScD44u8AwTVI-3ft8O78UGrroQI5f68,2573

+pip/_internal/main.py,sha256=r-UnUe8HLo5XFJz8inTcOOTiu_sxNhgHb6VwlGUllOI,340

+pip/_internal/metadata/__init__.py,sha256=84j1dPJaIoz5Q2ZTPi0uB1iaDAHiUNfKtYSGQCfFKpo,4280

+pip/_internal/metadata/__pycache__/__init__.cpython-39.pyc,,

+pip/_internal/metadata/__pycache__/_json.cpython-39.pyc,,

+pip/_internal/metadata/__pycache__/base.cpython-39.pyc,,

+pip/_internal/metadata/__pycache__/pkg_resources.cpython-39.pyc,,

+pip/_internal/metadata/_json.py,sha256=BTkWfFDrWFwuSodImjtbAh8wCL3isecbnjTb5E6UUDI,2595

+pip/_internal/metadata/base.py,sha256=vIwIo1BtoqegehWMAXhNrpLGYBq245rcaCNkBMPnTU8,25277

+pip/_internal/metadata/importlib/__init__.py,sha256=9ZVO8BoE7NEZPmoHp5Ap_NJo0HgNIezXXg-TFTtt3Z4,107

+pip/_internal/metadata/importlib/__pycache__/__init__.cpython-39.pyc,,

+pip/_internal/metadata/importlib/__pycache__/_compat.cpython-39.pyc,,

+pip/_internal/metadata/importlib/__pycache__/_dists.cpython-39.pyc,,

+pip/_internal/metadata/importlib/__pycache__/_envs.cpython-39.pyc,,

+pip/_internal/metadata/importlib/_compat.py,sha256=GAe_prIfCE4iUylrnr_2dJRlkkBVRUbOidEoID7LPoE,1882

+pip/_internal/metadata/importlib/_dists.py,sha256=BUV8y6D0PePZrEN3vfJL-m1FDqZ6YPRgAiBeBinHhNg,8181

+pip/_internal/metadata/importlib/_envs.py,sha256=7BxanCh3T7arusys__O2ZHJdnmDhQXFmfU7x1-jB5xI,7457

+pip/_internal/metadata/pkg_resources.py,sha256=WjwiNdRsvxqxL4MA5Tb5a_q3Q3sUhdpbZF8wGLtPMI0,9773

+pip/_internal/models/__init__.py,sha256=3DHUd_qxpPozfzouoqa9g9ts1Czr5qaHfFxbnxriepM,63

+pip/_internal/models/__pycache__/__init__.cpython-39.pyc,,

+pip/_internal/models/__pycache__/candidate.cpython-39.pyc,,

+pip/_internal/models/__pycache__/direct_url.cpython-39.pyc,,

+pip/_internal/models/__pycache__/format_control.cpython-39.pyc,,

+pip/_internal/models/__pycache__/index.cpython-39.pyc,,

+pip/_internal/models/__pycache__/installation_report.cpython-39.pyc,,

+pip/_internal/models/__pycache__/link.cpython-39.pyc,,

+pip/_internal/models/__pycache__/scheme.cpython-39.pyc,,

+pip/_internal/models/__pycache__/search_scope.cpython-39.pyc,,

+pip/_internal/models/__pycache__/selection_prefs.cpython-39.pyc,,

+pip/_internal/models/__pycache__/target_python.cpython-39.pyc,,

+pip/_internal/models/__pycache__/wheel.cpython-39.pyc,,

+pip/_internal/models/candidate.py,sha256=6pcABsaR7CfIHlbJbr2_kMkVJFL_yrYjTx6SVWUnCPQ,990

+pip/_internal/models/direct_url.py,sha256=HLO0sL2aYB6n45bwmd72TDN05sLHJlOQI8M01l2SH3I,5877

+pip/_internal/models/format_control.py,sha256=DJpMYjxeYKKQdwNcML2_F0vtAh-qnKTYe-CpTxQe-4g,2520

+pip/_internal/models/index.py,sha256=tYnL8oxGi4aSNWur0mG8DAP7rC6yuha_MwJO8xw0crI,1030

+pip/_internal/models/installation_report.py,sha256=ad1arqtxrSFBvWnm6mRqmG12HLV3pZZcZcHrlTFIiqU,2617

+pip/_internal/models/link.py,sha256=9HWL14UQTMxRCnY6dmAz09rGElJrMAcHn2OJZCBx0tk,18083

+pip/_internal/models/scheme.py,sha256=3EFQp_ICu_shH1-TBqhl0QAusKCPDFOlgHFeN4XowWs,738

+pip/_internal/models/search_scope.py,sha256=iGPQQ6a4Lau8oGQ_FWj8aRLik8A21o03SMO5KnSt-Cg,4644

+pip/_internal/models/selection_prefs.py,sha256=KZdi66gsR-_RUXUr9uejssk3rmTHrQVJWeNA2sV-VSY,1907

+pip/_internal/models/target_python.py,sha256=qKpZox7J8NAaPmDs5C_aniwfPDxzvpkrCKqfwndG87k,3858

+pip/_internal/models/wheel.py,sha256=YqazoIZyma_Q1ejFa1C7NHKQRRWlvWkdK96VRKmDBeI,3600

+pip/_internal/network/__init__.py,sha256=jf6Tt5nV_7zkARBrKojIXItgejvoegVJVKUbhAa5Ioc,50

+pip/_internal/network/__pycache__/__init__.cpython-39.pyc,,

+pip/_internal/network/__pycache__/auth.cpython-39.pyc,,

+pip/_internal/network/__pycache__/cache.cpython-39.pyc,,

+pip/_internal/network/__pycache__/download.cpython-39.pyc,,

+pip/_internal/network/__pycache__/lazy_wheel.cpython-39.pyc,,

+pip/_internal/network/__pycache__/session.cpython-39.pyc,,

+pip/_internal/network/__pycache__/utils.cpython-39.pyc,,

+pip/_internal/network/__pycache__/xmlrpc.cpython-39.pyc,,

+pip/_internal/network/auth.py,sha256=a3C7Xaa8kTJjXkdi_wrUjqaySc8Z9Yz7U6QIbXfzMyc,12190

+pip/_internal/network/cache.py,sha256=hgXftU-eau4MWxHSLquTMzepYq5BPC2zhCkhN3glBy8,2145

+pip/_internal/network/download.py,sha256=HvDDq9bVqaN3jcS3DyVJHP7uTqFzbShdkf7NFSoHfkw,6096

+pip/_internal/network/lazy_wheel.py,sha256=PbPyuleNhtEq6b2S7rufoGXZWMD15FAGL4XeiAQ8FxA,7638

+pip/_internal/network/session.py,sha256=BpDOJ7_Xw5VkgPYWsePzcaqOfcyRZcB2AW7W0HGBST0,18443

+pip/_internal/network/utils.py,sha256=6A5SrUJEEUHxbGtbscwU2NpCyz-3ztiDlGWHpRRhsJ8,4073

+pip/_internal/network/xmlrpc.py,sha256=AzQgG4GgS152_cqmGr_Oz2MIXsCal-xfsis7fA7nmU0,1791

+pip/_internal/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0

+pip/_internal/operations/__pycache__/__init__.cpython-39.pyc,,

+pip/_internal/operations/__pycache__/check.cpython-39.pyc,,

+pip/_internal/operations/__pycache__/freeze.cpython-39.pyc,,

+pip/_internal/operations/__pycache__/prepare.cpython-39.pyc,,

+pip/_internal/operations/build/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0

+pip/_internal/operations/build/__pycache__/__init__.cpython-39.pyc,,

+pip/_internal/operations/build/__pycache__/build_tracker.cpython-39.pyc,,

+pip/_internal/operations/build/__pycache__/metadata.cpython-39.pyc,,

+pip/_internal/operations/build/__pycache__/metadata_editable.cpython-39.pyc,,

+pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-39.pyc,,

+pip/_internal/operations/build/__pycache__/wheel.cpython-39.pyc,,

+pip/_internal/operations/build/__pycache__/wheel_editable.cpython-39.pyc,,

+pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-39.pyc,,

+pip/_internal/operations/build/build_tracker.py,sha256=vf81EwomN3xe9G8qRJED0VGqNikmRQRQoobNsxi5Xrs,4133

+pip/_internal/operations/build/metadata.py,sha256=ES_uRmAvhrNm_nDTpZxshBfUsvnXtkj-g_4rZrH9Rww,1404

+pip/_internal/operations/build/metadata_editable.py,sha256=_Rai0VZjxoeJUkjkuICrq45LtjwFoDOveosMYH43rKc,1456

+pip/_internal/operations/build/metadata_legacy.py,sha256=o-eU21As175hDC7dluM1fJJ_FqokTIShyWpjKaIpHZw,2198

+pip/_internal/operations/build/wheel.py,sha256=AO9XnTGhTgHtZmU8Dkbfo1OGr41rBuSDjIgAa4zUKgE,1063

+pip/_internal/operations/build/wheel_editable.py,sha256=TVETY-L_M_dSEKBhTIcQOP75zKVXw8tuq1U354Mm30A,1405

+pip/_internal/operations/build/wheel_legacy.py,sha256=C9j6rukgQI1n_JeQLoZGuDdfUwzCXShyIdPTp6edbMQ,3064

+pip/_internal/operations/check.py,sha256=ca4O9CkPt9Em9sLCf3H0iVt1GIcW7M8C0U5XooaBuT4,5109

+pip/_internal/operations/freeze.py,sha256=mwTZ2uML8aQgo3k8MR79a7SZmmmvdAJqdyaknKbavmg,9784

+pip/_internal/operations/install/__init__.py,sha256=mX7hyD2GNBO2mFGokDQ30r_GXv7Y_PLdtxcUv144e-s,51

+pip/_internal/operations/install/__pycache__/__init__.cpython-39.pyc,,

+pip/_internal/operations/install/__pycache__/editable_legacy.cpython-39.pyc,,

+pip/_internal/operations/install/__pycache__/legacy.cpython-39.pyc,,

+pip/_internal/operations/install/__pycache__/wheel.cpython-39.pyc,,

+pip/_internal/operations/install/editable_legacy.py,sha256=ee4kfJHNuzTdKItbfAsNOSEwq_vD7DRPGkBdK48yBhU,1354

+pip/_internal/operations/install/legacy.py,sha256=cHdcHebyzf8w7OaOLwcsTNSMSSV8WBoAPFLay_9CjE8,4105

+pip/_internal/operations/install/wheel.py,sha256=CxzEg2wTPX4SxNTPIx0ozTqF1X7LhpCyP3iM2FjcKUE,27407

+pip/_internal/operations/prepare.py,sha256=BeYXrLFpRoV5XBnRXQHxRA2plyC36kK9Pms5D9wjCo4,25091

+pip/_internal/pyproject.py,sha256=ob0Gb0l12YLZNxjdpZGRfWHgjqhZTnSVv96RuJyNOfs,7074

+pip/_internal/req/__init__.py,sha256=rUQ9d_Sh3E5kNYqX9pkN0D06YL-LrtcbJQ-LiIonq08,2807

+pip/_internal/req/__pycache__/__init__.cpython-39.pyc,,

+pip/_internal/req/__pycache__/constructors.cpython-39.pyc,,

+pip/_internal/req/__pycache__/req_file.cpython-39.pyc,,

+pip/_internal/req/__pycache__/req_install.cpython-39.pyc,,

+pip/_internal/req/__pycache__/req_set.cpython-39.pyc,,

+pip/_internal/req/__pycache__/req_uninstall.cpython-39.pyc,,

+pip/_internal/req/constructors.py,sha256=ypjtq1mOQ3d2mFkFPMf_6Mr8SLKeHQk3tUKHA1ddG0U,16611

+pip/_internal/req/req_file.py,sha256=N6lPO3c0to_G73YyGAnk7VUYmed5jV4Qxgmt1xtlXVg,17646

+pip/_internal/req/req_install.py,sha256=4tzyVGPHJ1-GXowm6PBT52BGIlbc4w7fhVqf-55bmRg,35600

+pip/_internal/req/req_set.py,sha256=j3esG0s6SzoVReX9rWn4rpYNtyET_fwxbwJPRimvRxo,2858

+pip/_internal/req/req_uninstall.py,sha256=ZFQfgSNz6H1BMsgl87nQNr2iaQCcbFcmXpW8rKVQcic,24045

+pip/_internal/resolution/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0

+pip/_internal/resolution/__pycache__/__init__.cpython-39.pyc,,

+pip/_internal/resolution/__pycache__/base.cpython-39.pyc,,

+pip/_internal/resolution/base.py,sha256=qlmh325SBVfvG6Me9gc5Nsh5sdwHBwzHBq6aEXtKsLA,583

+pip/_internal/resolution/legacy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0

+pip/_internal/resolution/legacy/__pycache__/__init__.cpython-39.pyc,,

+pip/_internal/resolution/legacy/__pycache__/resolver.cpython-39.pyc,,

+pip/_internal/resolution/legacy/resolver.py,sha256=9em8D5TcSsEN4xZM1WreaRShOnyM4LlvhMSHpUPsocE,24129

+pip/_internal/resolution/resolvelib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0

+pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-39.pyc,,

+pip/_internal/resolution/resolvelib/__pycache__/base.cpython-39.pyc,,

+pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-39.pyc,,

+pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-39.pyc,,

+pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-39.pyc,,

+pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-39.pyc,,

+pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-39.pyc,,

+pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-39.pyc,,

+pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-39.pyc,,

+pip/_internal/resolution/resolvelib/base.py,sha256=u1O4fkvCO4mhmu5i32xrDv9AX5NgUci_eYVyBDQhTIM,5220

+pip/_internal/resolution/resolvelib/candidates.py,sha256=6kQZeMzwibnL4lO6bW0hUQQjNEvXfADdFphRRkRvOtc,18963

+pip/_internal/resolution/resolvelib/factory.py,sha256=OnjkLIgyk5Tol7uOOqapA1D4qiRHWmPU18DF1yN5N8o,27878

+pip/_internal/resolution/resolvelib/found_candidates.py,sha256=hvL3Hoa9VaYo-qEOZkBi2Iqw251UDxPz-uMHVaWmLpE,5705

+pip/_internal/resolution/resolvelib/provider.py,sha256=Vd4jW_NnyifB-HMkPYtZIO70M3_RM0MbL5YV6XyBM-w,9914

+pip/_internal/resolution/resolvelib/reporter.py,sha256=3ZVVYrs5PqvLFJkGLcuXoMK5mTInFzl31xjUpDBpZZk,2526

+pip/_internal/resolution/resolvelib/requirements.py,sha256=B1ndvKPSuyyyTEXt9sKhbwminViSWnBrJa7qO2ln4Z0,5455

+pip/_internal/resolution/resolvelib/resolver.py,sha256=nYZ9bTFXj5c1ILKnkSgU7tUCTYyo5V5J-J0sKoA7Wzg,11533

+pip/_internal/self_outdated_check.py,sha256=R3MmjCyUt_lkUNMc6p3xVSx7vX28XiDh3VDs5OrYn6Q,8020

+pip/_internal/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0

+pip/_internal/utils/__pycache__/__init__.cpython-39.pyc,,

+pip/_internal/utils/__pycache__/_log.cpython-39.pyc,,

+pip/_internal/utils/__pycache__/appdirs.cpython-39.pyc,,

+pip/_internal/utils/__pycache__/compat.cpython-39.pyc,,

+pip/_internal/utils/__pycache__/compatibility_tags.cpython-39.pyc,,

+pip/_internal/utils/__pycache__/datetime.cpython-39.pyc,,

+pip/_internal/utils/__pycache__/deprecation.cpython-39.pyc,,

+pip/_internal/utils/__pycache__/direct_url_helpers.cpython-39.pyc,,

+pip/_internal/utils/__pycache__/distutils_args.cpython-39.pyc,,

+pip/_internal/utils/__pycache__/egg_link.cpython-39.pyc,,

+pip/_internal/utils/__pycache__/encoding.cpython-39.pyc,,

+pip/_internal/utils/__pycache__/entrypoints.cpython-39.pyc,,

+pip/_internal/utils/__pycache__/filesystem.cpython-39.pyc,,

+pip/_internal/utils/__pycache__/filetypes.cpython-39.pyc,,

+pip/_internal/utils/__pycache__/glibc.cpython-39.pyc,,

+pip/_internal/utils/__pycache__/hashes.cpython-39.pyc,,

+pip/_internal/utils/__pycache__/inject_securetransport.cpython-39.pyc,,

+pip/_internal/utils/__pycache__/logging.cpython-39.pyc,,

+pip/_internal/utils/__pycache__/misc.cpython-39.pyc,,

+pip/_internal/utils/__pycache__/models.cpython-39.pyc,,

+pip/_internal/utils/__pycache__/packaging.cpython-39.pyc,,

+pip/_internal/utils/__pycache__/setuptools_build.cpython-39.pyc,,

+pip/_internal/utils/__pycache__/subprocess.cpython-39.pyc,,

+pip/_internal/utils/__pycache__/temp_dir.cpython-39.pyc,,

+pip/_internal/utils/__pycache__/unpacking.cpython-39.pyc,,

+pip/_internal/utils/__pycache__/urls.cpython-39.pyc,,

+pip/_internal/utils/__pycache__/virtualenv.cpython-39.pyc,,

+pip/_internal/utils/__pycache__/wheel.cpython-39.pyc,,

+pip/_internal/utils/_log.py,sha256=-jHLOE_THaZz5BFcCnoSL9EYAtJ0nXem49s9of4jvKw,1015

+pip/_internal/utils/appdirs.py,sha256=swgcTKOm3daLeXTW6v5BUS2Ti2RvEnGRQYH_yDXklAo,1665

+pip/_internal/utils/compat.py,sha256=ACyBfLgj3_XG-iA5omEDrXqDM0cQKzi8h8HRBInzG6Q,1884

+pip/_internal/utils/compatibility_tags.py,sha256=ydin8QG8BHqYRsPY4OL6cmb44CbqXl1T0xxS97VhHkk,5377

+pip/_internal/utils/datetime.py,sha256=m21Y3wAtQc-ji6Veb6k_M5g6A0ZyFI4egchTdnwh-pQ,242

+pip/_internal/utils/deprecation.py,sha256=OLc7GzDwPob9y8jscDYCKUNBV-9CWwqFplBOJPLOpBM,5764

+pip/_internal/utils/direct_url_helpers.py,sha256=6F1tc2rcKaCZmgfVwsE6ObIe_Pux23mUVYA-2D9wCFc,3206

+pip/_internal/utils/distutils_args.py,sha256=bYUt4wfFJRaeGO4VHia6FNaA8HlYXMcKuEq1zYijY5g,1115

+pip/_internal/utils/egg_link.py,sha256=5MVlpz5LirT4iLQq86OYzjXaYF0D4Qk1dprEI7ThST4,2203

+pip/_internal/utils/encoding.py,sha256=qqsXDtiwMIjXMEiIVSaOjwH5YmirCaK-dIzb6-XJsL0,1169

+pip/_internal/utils/entrypoints.py,sha256=YlhLTRl2oHBAuqhc-zmL7USS67TPWVHImjeAQHreZTQ,3064

+pip/_internal/utils/filesystem.py,sha256=RhMIXUaNVMGjc3rhsDahWQ4MavvEQDdqXqgq-F6fpw8,5122

+pip/_internal/utils/filetypes.py,sha256=i8XAQ0eFCog26Fw9yV0Yb1ygAqKYB1w9Cz9n0fj8gZU,716

+pip/_internal/utils/glibc.py,sha256=tDfwVYnJCOC0BNVpItpy8CGLP9BjkxFHdl0mTS0J7fc,3110

+pip/_internal/utils/hashes.py,sha256=1WhkVNIHNfuYLafBHThIjVKGplxFJXSlQtuG2mXNlJI,4831

+pip/_internal/utils/inject_securetransport.py,sha256=o-QRVMGiENrTJxw3fAhA7uxpdEdw6M41TjHYtSVRrcg,795

+pip/_internal/utils/logging.py,sha256=U2q0i1n8hPS2gQh8qcocAg5dovGAa_bR24akmXMzrk4,11632

+pip/_internal/utils/misc.py,sha256=49Rs2NgrD4JGTKFt0farCm7FIAi-rjyoxgioArhCW_0,21617

+pip/_internal/utils/models.py,sha256=5GoYU586SrxURMvDn_jBMJInitviJg4O5-iOU-6I0WY,1193

+pip/_internal/utils/packaging.py,sha256=5Wm6_x7lKrlqVjPI5MBN_RurcRHwVYoQ7Ksrs84de7s,2108

+pip/_internal/utils/setuptools_build.py,sha256=4i3CuS34yNrkePnZ73rR47pyDzpZBo-SX9V5PNDSSHY,5662

+pip/_internal/utils/subprocess.py,sha256=MYySbvY7qBevRxq_RFfOsDqG4vMqrB4vDoL_eyPE6Bo,9197

+pip/_internal/utils/temp_dir.py,sha256=aCX489gRa4Nu0dMKRFyGhV6maJr60uEynu5uCbKR4Qg,7702

+pip/_internal/utils/unpacking.py,sha256=SBb2iV1crb89MDRTEKY86R4A_UOWApTQn9VQVcMDOlE,8821

+pip/_internal/utils/urls.py,sha256=AhaesUGl-9it6uvG6fsFPOr9ynFpGaTMk4t5XTX7Z_Q,1759

+pip/_internal/utils/virtualenv.py,sha256=4_48qMzCwB_F5jIK5BC_ua7uiAMVifmQWU9NdaGUoVA,3459

+pip/_internal/utils/wheel.py,sha256=lXOgZyTlOm5HmK8tw5iw0A3_5A6wRzsXHOaQkIvvloU,4549

+pip/_internal/vcs/__init__.py,sha256=UAqvzpbi0VbZo3Ub6skEeZAw-ooIZR-zX_WpCbxyCoU,596

+pip/_internal/vcs/__pycache__/__init__.cpython-39.pyc,,

+pip/_internal/vcs/__pycache__/bazaar.cpython-39.pyc,,

+pip/_internal/vcs/__pycache__/git.cpython-39.pyc,,

+pip/_internal/vcs/__pycache__/mercurial.cpython-39.pyc,,

+pip/_internal/vcs/__pycache__/subversion.cpython-39.pyc,,

+pip/_internal/vcs/__pycache__/versioncontrol.cpython-39.pyc,,

+pip/_internal/vcs/bazaar.py,sha256=zq-Eu2NtJffc6kOsyv2kmRTnKg9qeIXE-KH5JeKck70,3518

+pip/_internal/vcs/git.py,sha256=mjhwudCx9WlLNkxZ6_kOKmueF0rLoU2i1xeASKF6yiQ,18116

+pip/_internal/vcs/mercurial.py,sha256=Bzbd518Jsx-EJI0IhIobiQqiRsUv5TWYnrmRIFWE0Gw,5238

+pip/_internal/vcs/subversion.py,sha256=AeUVE9d9qp-0QSOMiUvuFHy1TK950E3QglN7ipP13sI,11728

+pip/_internal/vcs/versioncontrol.py,sha256=KUOc-hN51em9jrqxKwUR3JnkgSE-xSOqMiiJcSaL6B8,22811

+pip/_internal/wheel_builder.py,sha256=8cObBCu4mIsMJqZM7xXI9DO3vldiAnRNa1Gt6izPPTs,13079

+pip/_vendor/__init__.py,sha256=fNxOSVD0auElsD8fN9tuq5psfgMQ-RFBtD4X5gjlRkg,4966

+pip/_vendor/__pycache__/__init__.cpython-39.pyc,,

+pip/_vendor/__pycache__/six.cpython-39.pyc,,

+pip/_vendor/__pycache__/typing_extensions.cpython-39.pyc,,

+pip/_vendor/cachecontrol/__init__.py,sha256=hrxlv3q7upsfyMw8k3gQ9vagBax1pYHSGGqYlZ0Zk0M,465

+pip/_vendor/cachecontrol/__pycache__/__init__.cpython-39.pyc,,

+pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-39.pyc,,

+pip/_vendor/cachecontrol/__pycache__/adapter.cpython-39.pyc,,

+pip/_vendor/cachecontrol/__pycache__/cache.cpython-39.pyc,,

+pip/_vendor/cachecontrol/__pycache__/compat.cpython-39.pyc,,

+pip/_vendor/cachecontrol/__pycache__/controller.cpython-39.pyc,,

+pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-39.pyc,,

+pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-39.pyc,,

+pip/_vendor/cachecontrol/__pycache__/serialize.cpython-39.pyc,,

+pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-39.pyc,,

+pip/_vendor/cachecontrol/_cmd.py,sha256=lxUXqfNTVx84zf6tcWbkLZHA6WVBRtJRpfeA9ZqhaAY,1379

+pip/_vendor/cachecontrol/adapter.py,sha256=ew9OYEQHEOjvGl06ZsuX8W3DAvHWsQKHwWAxISyGug8,5033

+pip/_vendor/cachecontrol/cache.py,sha256=Tty45fOjH40fColTGkqKQvQQmbYsMpk-nCyfLcv2vG4,1535

+pip/_vendor/cachecontrol/caches/__init__.py,sha256=h-1cUmOz6mhLsjTjOrJ8iPejpGdLCyG4lzTftfGZvLg,242

+pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-39.pyc,,

+pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-39.pyc,,

+pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-39.pyc,,

+pip/_vendor/cachecontrol/caches/file_cache.py,sha256=GpexcE29LoY4MaZwPUTcUBZaDdcsjqyLxZFznk8Hbr4,5271

+pip/_vendor/cachecontrol/caches/redis_cache.py,sha256=mp-QWonP40I3xJGK3XVO-Gs9a3UjzlqqEmp9iLJH9F4,1033

+pip/_vendor/cachecontrol/compat.py,sha256=LNx7vqBndYdHU8YuJt53ab_8rzMGTXVrvMb7CZJkxG0,778

+pip/_vendor/cachecontrol/controller.py,sha256=bAYrt7x_VH4toNpI066LQxbHpYGpY1MxxmZAhspplvw,16416

+pip/_vendor/cachecontrol/filewrapper.py,sha256=X4BAQOO26GNOR7nH_fhTzAfeuct2rBQcx_15MyFBpcs,3946

+pip/_vendor/cachecontrol/heuristics.py,sha256=8kAyuZLSCyEIgQr6vbUwfhpqg9ows4mM0IV6DWazevI,4154

+pip/_vendor/cachecontrol/serialize.py,sha256=_U1NU_C-SDgFzkbAxAsPDgMTHeTWZZaHCQnZN_jh0U8,7105

+pip/_vendor/cachecontrol/wrapper.py,sha256=X3-KMZ20Ho3VtqyVaXclpeQpFzokR5NE8tZSfvKVaB8,774

+pip/_vendor/certifi/__init__.py,sha256=luDjIGxDSrQ9O0zthdz5Lnt069Z_7eR1GIEefEaf-Ys,94

+pip/_vendor/certifi/__main__.py,sha256=1k3Cr95vCxxGRGDljrW3wMdpZdL3Nhf0u1n-k2qdsCY,255

+pip/_vendor/certifi/__pycache__/__init__.cpython-39.pyc,,

+pip/_vendor/certifi/__pycache__/__main__.cpython-39.pyc,,

+pip/_vendor/certifi/__pycache__/core.cpython-39.pyc,,

+pip/_vendor/certifi/cacert.pem,sha256=3l8CcWt_qL42030rGieD3SLufICFX0bYtGhDl_EXVPI,286370

+pip/_vendor/certifi/core.py,sha256=ZwiOsv-sD_ouU1ft8wy_xZ3LQ7UbcVzyqj2XNyrsZis,4279

+pip/_vendor/chardet/__init__.py,sha256=9-r0i294avRciob2HKVcKf6GJmXPHpgMqIijVrqHBDU,3705

+pip/_vendor/chardet/__pycache__/__init__.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/big5freq.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/big5prober.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/chardistribution.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/charsetprober.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/cp949prober.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/enums.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/escprober.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/escsm.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/eucjpprober.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/euckrfreq.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/euckrprober.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/euctwfreq.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/euctwprober.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/gb2312freq.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/gb2312prober.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/hebrewprober.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/jisfreq.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/johabfreq.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/johabprober.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/jpcntx.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/langrussianmodel.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/langthaimodel.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/latin1prober.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/mbcssm.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/sjisprober.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/universaldetector.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/utf1632prober.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/utf8prober.cpython-39.pyc,,

+pip/_vendor/chardet/__pycache__/version.cpython-39.pyc,,

+pip/_vendor/chardet/big5freq.py,sha256=ltcfP-3PjlNHCoo5e4a7C4z-2DhBTXRfY6jbMbB7P30,31274

+pip/_vendor/chardet/big5prober.py,sha256=neUXIlq35507yibstiznZWFzyNcMn6EXrqJaUJVPWKg,1741

+pip/_vendor/chardet/chardistribution.py,sha256=M9NTKdM72KieFKy4TT5eml4PP0WaVcXuY5PpWSFD0FA,9608

+pip/_vendor/chardet/charsetgroupprober.py,sha256=CaIBAmNitEsYuSgMvgAsMREN4cLxMj5OYwMhVo6MAxk,3817

+pip/_vendor/chardet/charsetprober.py,sha256=Eo3w8sCmbvnVKOGNW1iy50KATVs8xV-gF7cQ0VG85dQ,4801

+pip/_vendor/chardet/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0

+pip/_vendor/chardet/cli/__pycache__/__init__.cpython-39.pyc,,

+pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-39.pyc,,

+pip/_vendor/chardet/cli/chardetect.py,sha256=1qMxT3wrp5vP6ugSf1-Zz3BWwlbCWJ0jzeCuhgX85vw,2406

+pip/_vendor/chardet/codingstatemachine.py,sha256=BiGR9kgTYbS4gJI5qBmE52HMOBOR_roDvXf7aIehdEk,3559

+pip/_vendor/chardet/cp949prober.py,sha256=kCQEaOCzMntqv7pAyXEobWTRgIUxYfoiUr0btXO1nI8,1838

+pip/_vendor/chardet/enums.py,sha256=Rodw4p61Vg9U-oCo6eUuT7uDzKwIbCaA15HwbvCoCNk,1619

+pip/_vendor/chardet/escprober.py,sha256=girD61r3NsQLnMQXsWWBU4hHuRJzTH3V7-VfTUr-nQY,3864

+pip/_vendor/chardet/escsm.py,sha256=0Vs4iPPovberMoSxxnK5pI161Xf-mtKgOl14g5Xc7zg,12021

+pip/_vendor/chardet/eucjpprober.py,sha256=pGgs4lINwCEDV2bxqIZ6hXpaj2j4l2oLsMx6kuOK_zQ,3676

+pip/_vendor/chardet/euckrfreq.py,sha256=3mHuRvXfsq_QcQysDQFb8qSudvTiol71C6Ic2w57tKM,13566

+pip/_vendor/chardet/euckrprober.py,sha256=qBuSS2zXWaoUmGdzz3owAnD1GNhuKR_8bYzDC3yxe6I,1731

+pip/_vendor/chardet/euctwfreq.py,sha256=2alILE1Lh5eqiFJZjzRkMQXolNJRHY5oBQd-vmZYFFM,36913

+pip/_vendor/chardet/euctwprober.py,sha256=SLnCoJC94jZL8PJio60Q8PZACJA1rVPtUdWMa1W8Pwk,1731

+pip/_vendor/chardet/gb2312freq.py,sha256=49OrdXzD-HXqwavkqjo8Z7gvs58hONNzDhAyMENNkvY,20735

+pip/_vendor/chardet/gb2312prober.py,sha256=NS_i52jZE0TnWGkKqFduvu9fzW0nMcS2XbYJ8qSX8hY,1737

+pip/_vendor/chardet/hebrewprober.py,sha256=1l1hXF8-2IWDrPkf85UvAO1GVtMfY1r11kDgOqa-gU4,13919

+pip/_vendor/chardet/jisfreq.py,sha256=mm8tfrwqhpOd3wzZKS4NJqkYBQVcDfTM2JiQ5aW932E,25796

+pip/_vendor/chardet/johabfreq.py,sha256=dBpOYG34GRX6SL8k_LbS9rxZPMjLjoMlgZ03Pz5Hmqc,42498

+pip/_vendor/chardet/johabprober.py,sha256=C18osd4vMPfy9facw-Y1Lor_9UrW0PeV-zxM2fu441c,1730

+pip/_vendor/chardet/jpcntx.py,sha256=m1gDpPkRca4EDwym8XSL5YdoILFnFsDbNBYMQV7_-NE,26797

+pip/_vendor/chardet/langbulgarianmodel.py,sha256=vmbvYFP8SZkSxoBvLkFqKiH1sjma5ihk3PTpdy71Rr4,104562

+pip/_vendor/chardet/langgreekmodel.py,sha256=JfB7bupjjJH2w3X_mYnQr9cJA_7EuITC2cRW13fUjeI,98484

+pip/_vendor/chardet/langhebrewmodel.py,sha256=3HXHaLQPNAGcXnJjkIJfozNZLTvTJmf4W5Awi6zRRKc,98196

+pip/_vendor/chardet/langhungarianmodel.py,sha256=WxbeQIxkv8YtApiNqxQcvj-tMycsoI4Xy-fwkDHpP_Y,101363

+pip/_vendor/chardet/langrussianmodel.py,sha256=s395bTZ87ESTrZCOdgXbEjZ9P1iGPwCl_8xSsac_DLY,128035

+pip/_vendor/chardet/langthaimodel.py,sha256=7bJlQitRpTnVGABmbSznHnJwOHDy3InkTvtFUx13WQI,102774

+pip/_vendor/chardet/langturkishmodel.py,sha256=XY0eGdTIy4eQ9Xg1LVPZacb-UBhHBR-cq0IpPVHowKc,95372

+pip/_vendor/chardet/latin1prober.py,sha256=u_iGcQMUcZLXvj4B_WXx4caA0C5oaE2Qj1KTpz_RQ1I,5260

+pip/_vendor/chardet/mbcharsetprober.py,sha256=iKKuB6o_FF80NynRLBDT0UtwOnpLqmL_OspRPMib7CM,3367

+pip/_vendor/chardet/mbcsgroupprober.py,sha256=1D_kp9nv2_NQRddq9I2WDvB35OJh7Tfpo-OYTnL3B5o,2056

+pip/_vendor/chardet/mbcssm.py,sha256=EfORNu1WXgnFvpFarU8uJHS8KFif63xmgrHOB4DdDdY,30068

+pip/_vendor/chardet/metadata/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0

+pip/_vendor/chardet/metadata/__pycache__/__init__.cpython-39.pyc,,

+pip/_vendor/chardet/metadata/__pycache__/languages.cpython-39.pyc,,

+pip/_vendor/chardet/metadata/languages.py,sha256=HcaBygWtZq3gR8prIkJp_etvkhm2V4pUIToqjPZhgrc,13280

+pip/_vendor/chardet/sbcharsetprober.py,sha256=VvtWiNRLbHDZ5xgnofsmP1u8VQIkkaAuw3Ir9m1zDzQ,6199

+pip/_vendor/chardet/sbcsgroupprober.py,sha256=mekr4E3hgT4onmwi8oi1iEGW1CN-Z-BArG6kOtCunJw,4129

+pip/_vendor/chardet/sjisprober.py,sha256=sLfWS25PVFr5cDGhEf6h_s-RJsyeSteA-4ynsTl_UvA,3749

+pip/_vendor/chardet/universaldetector.py,sha256=BHeNWt1kn0yQgnR6xNtLAjiNmEQpSHYlKEvuZ9QyR1k,13288

+pip/_vendor/chardet/utf1632prober.py,sha256=N42YJEOkVDB67c38t5aJhXMG1QvnyWWDMNY5ERzniU0,8289

+pip/_vendor/chardet/utf8prober.py,sha256=mnLaSBV4gg-amt2WmxKFKWy4vVBedMNgjdbvgzBo0Dc,2709

+pip/_vendor/chardet/version.py,sha256=u_QYi-DXU1s7fyC_Rwa0I0-UcxMVmH7Co6c7QGKbe3g,242

+pip/_vendor/colorama/__init__.py,sha256=ihDoWQOkapwF7sqQ99AoDoEF3vGYm40OtmgW211cLZw,239

+pip/_vendor/colorama/__pycache__/__init__.cpython-39.pyc,,

+pip/_vendor/colorama/__pycache__/ansi.cpython-39.pyc,,

+pip/_vendor/colorama/__pycache__/ansitowin32.cpython-39.pyc,,

+pip/_vendor/colorama/__pycache__/initialise.cpython-39.pyc,,

+pip/_vendor/colorama/__pycache__/win32.cpython-39.pyc,,

+pip/_vendor/colorama/__pycache__/winterm.cpython-39.pyc,,

+pip/_vendor/colorama/ansi.py,sha256=Top4EeEuaQdBWdteKMEcGOTeKeF19Q-Wo_6_Cj5kOzQ,2522

+pip/_vendor/colorama/ansitowin32.py,sha256=gGrO7MVtwc-j1Sq3jKfZpERT1JWmYSOsTVDiTnFbZU4,10830

+pip/_vendor/colorama/initialise.py,sha256=PprovDNxMTrvoNHFcL2NZjpH2XzDc8BLxLxiErfUl4k,1915

+pip/_vendor/colorama/win32.py,sha256=bJ8Il9jwaBN5BJ8bmN6FoYZ1QYuMKv2j8fGrXh7TJjw,5404

+pip/_vendor/colorama/winterm.py,sha256=2y_2b7Zsv34feAsP67mLOVc-Bgq51mdYGo571VprlrM,6438

+pip/_vendor/distlib/__init__.py,sha256=acgfseOC55dNrVAzaBKpUiH3Z6V7Q1CaxsiQ3K7pC-E,581

+pip/_vendor/distlib/__pycache__/__init__.cpython-39.pyc,,

+pip/_vendor/distlib/__pycache__/compat.cpython-39.pyc,,

+pip/_vendor/distlib/__pycache__/database.cpython-39.pyc,,

+pip/_vendor/distlib/__pycache__/index.cpython-39.pyc,,

+pip/_vendor/distlib/__pycache__/locators.cpython-39.pyc,,

+pip/_vendor/distlib/__pycache__/manifest.cpython-39.pyc,,

+pip/_vendor/distlib/__pycache__/markers.cpython-39.pyc,,
+pip/_vendor/distlib/__pycache__/metadata.cpython-39.pyc,,
+pip/_vendor/distlib/__pycache__/resources.cpython-39.pyc,,
+pip/_vendor/distlib/__pycache__/scripts.cpython-39.pyc,,
+pip/_vendor/distlib/__pycache__/util.cpython-39.pyc,,
+pip/_vendor/distlib/__pycache__/version.cpython-39.pyc,,
+pip/_vendor/distlib/__pycache__/wheel.cpython-39.pyc,,
+pip/_vendor/distlib/compat.py,sha256=tfoMrj6tujk7G4UC2owL6ArgDuCKabgBxuJRGZSmpko,41259
+pip/_vendor/distlib/database.py,sha256=o_mw0fAr93NDAHHHfqG54Y1Hi9Rkfrp2BX15XWZYK50,51697
+pip/_vendor/distlib/index.py,sha256=HFiDG7LMoaBs829WuotrfIwcErOOExUOR_AeBtw_TCU,20834
+pip/_vendor/distlib/locators.py,sha256=wNzG-zERzS_XGls-nBPVVyLRHa2skUlkn0-5n0trMWA,51991
+pip/_vendor/distlib/manifest.py,sha256=nQEhYmgoreaBZzyFzwYsXxJARu3fo4EkunU163U16iE,14811
+pip/_vendor/distlib/markers.py,sha256=TpHHHLgkzyT7YHbwj-2i6weRaq-Ivy2-MUnrDkjau-U,5058
+pip/_vendor/distlib/metadata.py,sha256=g_DIiu8nBXRzA-mWPRpatHGbmFZqaFoss7z9TG7QSUU,39801
+pip/_vendor/distlib/resources.py,sha256=LwbPksc0A1JMbi6XnuPdMBUn83X7BPuFNWqPGEKI698,10820
+pip/_vendor/distlib/scripts.py,sha256=BmkTKmiTk4m2cj-iueliatwz3ut_9SsABBW51vnQnZU,18102
+pip/_vendor/distlib/t32.exe,sha256=a0GV5kCoWsMutvliiCKmIgV98eRZ33wXoS-XrqvJQVs,97792
+pip/_vendor/distlib/t64-arm.exe,sha256=68TAa32V504xVBnufojh0PcenpR3U4wAqTqf-MZqbPw,182784
+pip/_vendor/distlib/t64.exe,sha256=gaYY8hy4fbkHYTTnA4i26ct8IQZzkBG2pRdy0iyuBrc,108032
+pip/_vendor/distlib/util.py,sha256=31dPXn3Rfat0xZLeVoFpuniyhe6vsbl9_QN-qd9Lhlk,66262
+pip/_vendor/distlib/version.py,sha256=WG__LyAa2GwmA6qSoEJtvJE8REA1LZpbSizy8WvhJLk,23513
+pip/_vendor/distlib/w32.exe,sha256=R4csx3-OGM9kL4aPIzQKRo5TfmRSHZo6QWyLhDhNBks,91648
+pip/_vendor/distlib/w64-arm.exe,sha256=xdyYhKj0WDcVUOCb05blQYvzdYIKMbmJn2SZvzkcey4,168448
+pip/_vendor/distlib/w64.exe,sha256=ejGf-rojoBfXseGLpya6bFTFPWRG21X5KvU8J5iU-K0,101888
+pip/_vendor/distlib/wheel.py,sha256=Rgqs658VsJ3R2845qwnZD8XQryV2CzWw2mghwLvxxsI,43898
+pip/_vendor/distro/__init__.py,sha256=2fHjF-SfgPvjyNZ1iHh_wjqWdR_Yo5ODHwZC0jLBPhc,981
+pip/_vendor/distro/__main__.py,sha256=bu9d3TifoKciZFcqRBuygV3GSuThnVD_m2IK4cz96Vs,64
+pip/_vendor/distro/__pycache__/__init__.cpython-39.pyc,,
+pip/_vendor/distro/__pycache__/__main__.cpython-39.pyc,,
+pip/_vendor/distro/__pycache__/distro.cpython-39.pyc,,
+pip/_vendor/distro/distro.py,sha256=UYQG_9H_iSOt422uasA92HlY7aXeTnWKdV-IhsSAdwQ,48841
+pip/_vendor/idna/__init__.py,sha256=KJQN1eQBr8iIK5SKrJ47lXvxG0BJ7Lm38W4zT0v_8lk,849
+pip/_vendor/idna/__pycache__/__init__.cpython-39.pyc,,
+pip/_vendor/idna/__pycache__/codec.cpython-39.pyc,,
+pip/_vendor/idna/__pycache__/compat.cpython-39.pyc,,
+pip/_vendor/idna/__pycache__/core.cpython-39.pyc,,
+pip/_vendor/idna/__pycache__/idnadata.cpython-39.pyc,,
+pip/_vendor/idna/__pycache__/intranges.cpython-39.pyc,,
+pip/_vendor/idna/__pycache__/package_data.cpython-39.pyc,,
+pip/_vendor/idna/__pycache__/uts46data.cpython-39.pyc,,
+pip/_vendor/idna/codec.py,sha256=6ly5odKfqrytKT9_7UrlGklHnf1DSK2r9C6cSM4sa28,3374
+pip/_vendor/idna/compat.py,sha256=0_sOEUMT4CVw9doD3vyRhX80X19PwqFoUBs7gWsFME4,321
+pip/_vendor/idna/core.py,sha256=1JxchwKzkxBSn7R_oCE12oBu3eVux0VzdxolmIad24M,12950
+pip/_vendor/idna/idnadata.py,sha256=xUjqKqiJV8Ho_XzBpAtv5JFoVPSupK-SUXvtjygUHqw,44375
+pip/_vendor/idna/intranges.py,sha256=YBr4fRYuWH7kTKS2tXlFjM24ZF1Pdvcir-aywniInqg,1881
+pip/_vendor/idna/package_data.py,sha256=C_jHJzmX8PI4xq0jpzmcTMxpb5lDsq4o5VyxQzlVrZE,21
+pip/_vendor/idna/uts46data.py,sha256=zvjZU24s58_uAS850Mcd0NnD0X7_gCMAMjzWNIeUJdc,206539
+pip/_vendor/msgpack/__init__.py,sha256=NryGaKLDk_Egd58ZxXpnuI7OWO27AXz7S6CBFRM3sAY,1132
+pip/_vendor/msgpack/__pycache__/__init__.cpython-39.pyc,,
+pip/_vendor/msgpack/__pycache__/exceptions.cpython-39.pyc,,
+pip/_vendor/msgpack/__pycache__/ext.cpython-39.pyc,,
+pip/_vendor/msgpack/__pycache__/fallback.cpython-39.pyc,,
+pip/_vendor/msgpack/exceptions.py,sha256=dCTWei8dpkrMsQDcjQk74ATl9HsIBH0ybt8zOPNqMYc,1081
+pip/_vendor/msgpack/ext.py,sha256=TuldJPkYu8Wo_Xh0tFGL2l06-gY88NSR8tOje9fo2Wg,6080
+pip/_vendor/msgpack/fallback.py,sha256=OORDn86-fHBPlu-rPlMdM10KzkH6S_Rx9CHN1b7o4cg,34557
+pip/_vendor/packaging/__about__.py,sha256=ugASIO2w1oUyH8_COqQ2X_s0rDhjbhQC3yJocD03h2c,661
+pip/_vendor/packaging/__init__.py,sha256=b9Kk5MF7KxhhLgcDmiUWukN-LatWFxPdNug0joPhHSk,497
+pip/_vendor/packaging/__pycache__/__about__.cpython-39.pyc,,
+pip/_vendor/packaging/__pycache__/__init__.cpython-39.pyc,,
+pip/_vendor/packaging/__pycache__/_manylinux.cpython-39.pyc,,
+pip/_vendor/packaging/__pycache__/_musllinux.cpython-39.pyc,,
+pip/_vendor/packaging/__pycache__/_structures.cpython-39.pyc,,
+pip/_vendor/packaging/__pycache__/markers.cpython-39.pyc,,
+pip/_vendor/packaging/__pycache__/requirements.cpython-39.pyc,,
+pip/_vendor/packaging/__pycache__/specifiers.cpython-39.pyc,,
+pip/_vendor/packaging/__pycache__/tags.cpython-39.pyc,,
+pip/_vendor/packaging/__pycache__/utils.cpython-39.pyc,,
+pip/_vendor/packaging/__pycache__/version.cpython-39.pyc,,
+pip/_vendor/packaging/_manylinux.py,sha256=XcbiXB-qcjv3bcohp6N98TMpOP4_j3m-iOA8ptK2GWY,11488
+pip/_vendor/packaging/_musllinux.py,sha256=_KGgY_qc7vhMGpoqss25n2hiLCNKRtvz9mCrS7gkqyc,4378
+pip/_vendor/packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
+pip/_vendor/packaging/markers.py,sha256=AJBOcY8Oq0kYc570KuuPTkvuqjAlhufaE2c9sCUbm64,8487
+pip/_vendor/packaging/requirements.py,sha256=NtDlPBtojpn1IUC85iMjPNsUmufjpSlwnNA-Xb4m5NA,4676
+pip/_vendor/packaging/specifiers.py,sha256=LRQ0kFsHrl5qfcFNEEJrIFYsnIHQUJXY9fIsakTrrqE,30110
+pip/_vendor/packaging/tags.py,sha256=lmsnGNiJ8C4D_Pf9PbM0qgbZvD9kmB9lpZBQUZa3R_Y,15699
+pip/_vendor/packaging/utils.py,sha256=dJjeat3BS-TYn1RrUFVwufUMasbtzLfYRoy_HXENeFQ,4200
+pip/_vendor/packaging/version.py,sha256=_fLRNrFrxYcHVfyo8vk9j8s6JM8N_xsSxVFr6RJyco8,14665
+pip/_vendor/pep517/__init__.py,sha256=QJpRfzTpk6YSPgjcxp9-MCAiS5dEdzf9Bh0UXophG6c,130
+pip/_vendor/pep517/__pycache__/__init__.cpython-39.pyc,,
+pip/_vendor/pep517/__pycache__/_compat.cpython-39.pyc,,
+pip/_vendor/pep517/__pycache__/build.cpython-39.pyc,,
+pip/_vendor/pep517/__pycache__/check.cpython-39.pyc,,
+pip/_vendor/pep517/__pycache__/colorlog.cpython-39.pyc,,
+pip/_vendor/pep517/__pycache__/dirtools.cpython-39.pyc,,
+pip/_vendor/pep517/__pycache__/envbuild.cpython-39.pyc,,
+pip/_vendor/pep517/__pycache__/meta.cpython-39.pyc,,
+pip/_vendor/pep517/__pycache__/wrappers.cpython-39.pyc,,
+pip/_vendor/pep517/_compat.py,sha256=by6evrYnqkisiM-MQcvOKs5bgDMzlOSgZqRHNqf04zE,138
+pip/_vendor/pep517/build.py,sha256=VLtq0hOvNWCfX0FkdvTKEr-TmyrbaX0UqghpU7bHO1w,3443
+pip/_vendor/pep517/check.py,sha256=o0Mp_PX1yOM2WNq1ZdDph3YA7RObj2UGQUCUF-46RaU,6083
+pip/_vendor/pep517/colorlog.py,sha256=eCV1W52xzBjA-sOlKzUcvabRiFa11Y7hA791u-85_c8,3994
+pip/_vendor/pep517/dirtools.py,sha256=JiZ1Hlt2LNaLZEhNa_pm1YyG3MUoRh7KxY6hJ8ac-w0,607
+pip/_vendor/pep517/envbuild.py,sha256=nkTt1ZY7MXVgYOhPTyTr-VOxQ-q_Qc1touXfQgM56Bs,6081
+pip/_vendor/pep517/in_process/__init__.py,sha256=4yDanGyKTXQtLhqRo9eEZ1CsLFezEAEZMfqEd88xrvY,872
+pip/_vendor/pep517/in_process/__pycache__/__init__.cpython-39.pyc,,
+pip/_vendor/pep517/in_process/__pycache__/_in_process.cpython-39.pyc,,
+pip/_vendor/pep517/in_process/_in_process.py,sha256=JDpTxlKMDN1QfN_ey4IDtE6ZVSWtzP0_WLSqt1TyGaA,10801
+pip/_vendor/pep517/meta.py,sha256=budDWsV3I2OnnpSvXQ_ycuTqxh8G7DABoazAq-j8OlQ,2520
+pip/_vendor/pep517/wrappers.py,sha256=jcxIy-1Kl8I2xAZgbr6qNjF5b_6Q5gTndf9cxF0p5gM,12721
+pip/_vendor/pkg_resources/__init__.py,sha256=NnpQ3g6BCHzpMgOR_OLBmYtniY4oOzdKpwqghfq_6ug,108287
+pip/_vendor/pkg_resources/__pycache__/__init__.cpython-39.pyc,,
+pip/_vendor/pkg_resources/__pycache__/py31compat.cpython-39.pyc,,
+pip/_vendor/pkg_resources/py31compat.py,sha256=CRk8fkiPRDLsbi5pZcKsHI__Pbmh_94L8mr9Qy9Ab2U,562
+pip/_vendor/platformdirs/__init__.py,sha256=x0aUmmovXXuRFVrVQBtwIiovX12B7rUkdV4F9UlLz0Y,12831
+pip/_vendor/platformdirs/__main__.py,sha256=ZmsnTxEOxtTvwa-Y_Vfab_JN3X4XCVeN8X0yyy9-qnc,1176
+pip/_vendor/platformdirs/__pycache__/__init__.cpython-39.pyc,,
+pip/_vendor/platformdirs/__pycache__/__main__.cpython-39.pyc,,
+pip/_vendor/platformdirs/__pycache__/android.cpython-39.pyc,,
+pip/_vendor/platformdirs/__pycache__/api.cpython-39.pyc,,
+pip/_vendor/platformdirs/__pycache__/macos.cpython-39.pyc,,
+pip/_vendor/platformdirs/__pycache__/unix.cpython-39.pyc,,
+pip/_vendor/platformdirs/__pycache__/version.cpython-39.pyc,,
+pip/_vendor/platformdirs/__pycache__/windows.cpython-39.pyc,,
+pip/_vendor/platformdirs/android.py,sha256=GKizhyS7ESRiU67u8UnBJLm46goau9937EchXWbPBlk,4068
+pip/_vendor/platformdirs/api.py,sha256=MXKHXOL3eh_-trSok-JUTjAR_zjmmKF3rjREVABjP8s,4910
+pip/_vendor/platformdirs/macos.py,sha256=-3UXQewbT0yMhMdkzRXfXGAntmLIH7Qt4a9Hlf8I5_Y,2655
+pip/_vendor/platformdirs/unix.py,sha256=b4aVYTz0qZ50HntwOXo8r6tp82jAa3qTjxw-WlnC2yc,6910
+pip/_vendor/platformdirs/version.py,sha256=tsBKKPDX3LLh39yHXeTYauGRbRd-AmOJr9SwKldlFIU,78
+pip/_vendor/platformdirs/windows.py,sha256=ISruopR5UGBePC0BxCxXevkZYfjJsIZc49YWU5iYfQ4,6439
+pip/_vendor/pygments/__init__.py,sha256=5oLcMLXD0cTG8YcHBPITtK1fS0JBASILEvEnWkTezgE,2999
+pip/_vendor/pygments/__main__.py,sha256=p0_rz3JZmNZMNZBOqDojaEx1cr9wmA9FQZX_TYl74lQ,353
+pip/_vendor/pygments/__pycache__/__init__.cpython-39.pyc,,
+pip/_vendor/pygments/__pycache__/__main__.cpython-39.pyc,,
+pip/_vendor/pygments/__pycache__/cmdline.cpython-39.pyc,,
+pip/_vendor/pygments/__pycache__/console.cpython-39.pyc,,
+pip/_vendor/pygments/__pycache__/filter.cpython-39.pyc,,
+pip/_vendor/pygments/__pycache__/formatter.cpython-39.pyc,,
+pip/_vendor/pygments/__pycache__/lexer.cpython-39.pyc,,
+pip/_vendor/pygments/__pycache__/modeline.cpython-39.pyc,,
+pip/_vendor/pygments/__pycache__/plugin.cpython-39.pyc,,
+pip/_vendor/pygments/__pycache__/regexopt.cpython-39.pyc,,
+pip/_vendor/pygments/__pycache__/scanner.cpython-39.pyc,,
+pip/_vendor/pygments/__pycache__/sphinxext.cpython-39.pyc,,
+pip/_vendor/pygments/__pycache__/style.cpython-39.pyc,,
+pip/_vendor/pygments/__pycache__/token.cpython-39.pyc,,
+pip/_vendor/pygments/__pycache__/unistring.cpython-39.pyc,,
+pip/_vendor/pygments/__pycache__/util.cpython-39.pyc,,
+pip/_vendor/pygments/cmdline.py,sha256=rc0fah4eknRqFgn1wKNEwkq0yWnSqYOGaA4PaIeOxVY,23685
+pip/_vendor/pygments/console.py,sha256=hQfqCFuOlGk7DW2lPQYepsw-wkOH1iNt9ylNA1eRymM,1697
+pip/_vendor/pygments/filter.py,sha256=NglMmMPTRRv-zuRSE_QbWid7JXd2J4AvwjCW2yWALXU,1938
+pip/_vendor/pygments/filters/__init__.py,sha256=b5YuXB9rampSy2-cMtKxGQoMDfrG4_DcvVwZrzTlB6w,40386
+pip/_vendor/pygments/filters/__pycache__/__init__.cpython-39.pyc,,
+pip/_vendor/pygments/formatter.py,sha256=6-TS2Y8pUMeWIUolWwr1O8ruC-U6HydWDwOdbAiJgJQ,2917
+pip/_vendor/pygments/formatters/__init__.py,sha256=YTqGeHS17fNXCLMZpf7oCxBCKLB9YLsZ8IAsjGhawyg,4810
+pip/_vendor/pygments/formatters/__pycache__/__init__.cpython-39.pyc,,
+pip/_vendor/pygments/formatters/__pycache__/_mapping.cpython-39.pyc,,
+pip/_vendor/pygments/formatters/__pycache__/bbcode.cpython-39.pyc,,
+pip/_vendor/pygments/formatters/__pycache__/groff.cpython-39.pyc,,
+pip/_vendor/pygments/formatters/__pycache__/html.cpython-39.pyc,,
+pip/_vendor/pygments/formatters/__pycache__/img.cpython-39.pyc,,
+pip/_vendor/pygments/formatters/__pycache__/irc.cpython-39.pyc,,
+pip/_vendor/pygments/formatters/__pycache__/latex.cpython-39.pyc,,
+pip/_vendor/pygments/formatters/__pycache__/other.cpython-39.pyc,,
+pip/_vendor/pygments/formatters/__pycache__/pangomarkup.cpython-39.pyc,,
+pip/_vendor/pygments/formatters/__pycache__/rtf.cpython-39.pyc,,
+pip/_vendor/pygments/formatters/__pycache__/svg.cpython-39.pyc,,
+pip/_vendor/pygments/formatters/__pycache__/terminal.cpython-39.pyc,,
+pip/_vendor/pygments/formatters/__pycache__/terminal256.cpython-39.pyc,,
+pip/_vendor/pygments/formatters/_mapping.py,sha256=fCZgvsM6UEuZUG7J6lr47eVss5owKd_JyaNbDfxeqmQ,4104
+pip/_vendor/pygments/formatters/bbcode.py,sha256=JrL4ITjN-KzPcuQpPMBf1pm33eW2sDUNr8WzSoAJsJA,3314
+pip/_vendor/pygments/formatters/groff.py,sha256=xrOFoLbafSA9uHsSLRogy79_Zc4GWJ8tMK2hCdTJRsw,5086
+pip/_vendor/pygments/formatters/html.py,sha256=QNt9prPgxmbKx2M-nfDwoR1bIg06-sNouQuWnE434Wc,35441
+pip/_vendor/pygments/formatters/img.py,sha256=h75Y7IRZLZxDEIwyoOsdRLTwm7kLVPbODKkgEiJ0iKI,21938
+pip/_vendor/pygments/formatters/irc.py,sha256=iwk5tDJOxbCV64SCmOFyvk__x6RD60ay0nUn7ko9n7U,5871
+pip/_vendor/pygments/formatters/latex.py,sha256=thPbytJCIs2AUXsO3NZwqKtXJ-upOlcXP4CXsx94G4w,19351
+pip/_vendor/pygments/formatters/other.py,sha256=PczqK1Rms43lz6iucOLPeBMxIncPKOGBt-195w1ynII,5073
+pip/_vendor/pygments/formatters/pangomarkup.py,sha256=ZZzMsKJKXrsDniFeMTkIpe7aQ4VZYRHu0idWmSiUJ2U,2212
+pip/_vendor/pygments/formatters/rtf.py,sha256=abrKlWjipBkQvhIICxtjYTUNv6WME0iJJObFvqVuudE,5014
+pip/_vendor/pygments/formatters/svg.py,sha256=6MM9YyO8NhU42RTQfTWBiagWMnsf9iG5gwhqSriHORE,7335
+pip/_vendor/pygments/formatters/terminal.py,sha256=NpEGvwkC6LgMLQTjVzGrJXji3XcET1sb5JCunSCzoRo,4674
+pip/_vendor/pygments/formatters/terminal256.py,sha256=4v4OVizvsxtwWBpIy_Po30zeOzE5oJg_mOc1-rCjMDk,11753
+pip/_vendor/pygments/lexer.py,sha256=ZPB_TGn_qzrXodRFwEdPzzJk6LZBo9BlfSy3lacc6zg,32005
+pip/_vendor/pygments/lexers/__init__.py,sha256=8d80-XfL5UKDCC1wRD1a_ZBZDkZ2HOe7Zul8SsnNYFE,11174
+pip/_vendor/pygments/lexers/__pycache__/__init__.cpython-39.pyc,,
+pip/_vendor/pygments/lexers/__pycache__/_mapping.cpython-39.pyc,,
+pip/_vendor/pygments/lexers/__pycache__/python.cpython-39.pyc,,
+pip/_vendor/pygments/lexers/_mapping.py,sha256=zEiCV5FPiBioMJQJjw9kk7IJ5Y9GwknS4VJPYlcNchs,70232
+pip/_vendor/pygments/lexers/python.py,sha256=gZROs9iNSOA18YyVghP1cUCD0OwYZ04a6PCwgSOCeSA,53376
+pip/_vendor/pygments/modeline.py,sha256=gIbMSYrjSWPk0oATz7W9vMBYkUyTK2OcdVyKjioDRvA,986
+pip/_vendor/pygments/plugin.py,sha256=5rPxEoB_89qQMpOs0nI4KyLOzAHNlbQiwEMOKxqNmv8,2591
+pip/_vendor/pygments/regexopt.py,sha256=c6xcXGpGgvCET_3VWawJJqAnOp0QttFpQEdOPNY2Py0,3072
+pip/_vendor/pygments/scanner.py,sha256=F2T2G6cpkj-yZtzGQr-sOBw5w5-96UrJWveZN6va2aM,3092
+pip/_vendor/pygments/sphinxext.py,sha256=F8L0211sPnXaiWutN0lkSUajWBwlgDMIEFFAbMWOvZY,4630
+pip/_vendor/pygments/style.py,sha256=RRnussX1YiK9Z7HipIvKorImxu3-HnkdpPCO4u925T0,6257
+pip/_vendor/pygments/styles/__init__.py,sha256=iZDZ7PBKb55SpGlE1--cx9cbmWx5lVTH4bXO87t2Vok,3419
+pip/_vendor/pygments/styles/__pycache__/__init__.cpython-39.pyc,,
+pip/_vendor/pygments/token.py,sha256=vA2yNHGJBHfq4jNQSah7C9DmIOp34MmYHPA8P-cYAHI,6184
+pip/_vendor/pygments/unistring.py,sha256=gP3gK-6C4oAFjjo9HvoahsqzuV4Qz0jl0E0OxfDerHI,63187
+pip/_vendor/pygments/util.py,sha256=KgwpWWC3By5AiNwxGTI7oI9aXupH2TyZWukafBJe0Mg,9110
+pip/_vendor/pyparsing/__init__.py,sha256=ZPdI7pPo4IYXcABw-51AcqOzsxVvDtqnQbyn_qYWZvo,9171
+pip/_vendor/pyparsing/__pycache__/__init__.cpython-39.pyc,,
+pip/_vendor/pyparsing/__pycache__/actions.cpython-39.pyc,,
+pip/_vendor/pyparsing/__pycache__/common.cpython-39.pyc,,
+pip/_vendor/pyparsing/__pycache__/core.cpython-39.pyc,,
+pip/_vendor/pyparsing/__pycache__/exceptions.cpython-39.pyc,,
+pip/_vendor/pyparsing/__pycache__/helpers.cpython-39.pyc,,
+pip/_vendor/pyparsing/__pycache__/results.cpython-39.pyc,,
+pip/_vendor/pyparsing/__pycache__/testing.cpython-39.pyc,,
+pip/_vendor/pyparsing/__pycache__/unicode.cpython-39.pyc,,
+pip/_vendor/pyparsing/__pycache__/util.cpython-39.pyc,,
+pip/_vendor/pyparsing/actions.py,sha256=wU9i32e0y1ymxKE3OUwSHO-SFIrt1h_wv6Ws0GQjpNU,6426
+pip/_vendor/pyparsing/common.py,sha256=lFL97ooIeR75CmW5hjURZqwDCTgruqltcTCZ-ulLO2Q,12936
+pip/_vendor/pyparsing/core.py,sha256=AzTm1KFT1FIhiw2zvXZJmrpQoAwB0wOmeDCiR6SYytw,213344
+pip/_vendor/pyparsing/diagram/__init__.py,sha256=KW0PV_TvWKnL7jysz0pQbZ24nzWWu2ZfNaeyUIIywIg,23685
+pip/_vendor/pyparsing/diagram/__pycache__/__init__.cpython-39.pyc,,
+pip/_vendor/pyparsing/exceptions.py,sha256=3LbSafD32NYb1Tzt85GHNkhEAU1eZkTtNSk24cPMemo,9023
+pip/_vendor/pyparsing/helpers.py,sha256=QpUOjW0-psvueMwWb9bQpU2noqKCv98_wnw1VSzSdVo,39129
+pip/_vendor/pyparsing/results.py,sha256=HgNvWVXBdQP-Q6PtJfoCEeOJk2nwEvG-2KVKC5sGA30,25341
+pip/_vendor/pyparsing/testing.py,sha256=7tu4Abp4uSeJV0N_yEPRmmNUhpd18ZQP3CrX41DM814,13402
+pip/_vendor/pyparsing/unicode.py,sha256=fwuhMj30SQ165Cv7HJpu-rSxGbRm93kN9L4Ei7VGc1Y,10787
+pip/_vendor/pyparsing/util.py,sha256=kq772O5YSeXOSdP-M31EWpbH_ayj7BMHImBYo9xPD5M,6805
+pip/_vendor/requests/__init__.py,sha256=3XN75ZS4slWy3TQsEGF7-Q6l2R146teU-s2_rXNhxhU,5178
+pip/_vendor/requests/__pycache__/__init__.cpython-39.pyc,,
+pip/_vendor/requests/__pycache__/__version__.cpython-39.pyc,,
+pip/_vendor/requests/__pycache__/_internal_utils.cpython-39.pyc,,
+pip/_vendor/requests/__pycache__/adapters.cpython-39.pyc,,
+pip/_vendor/requests/__pycache__/api.cpython-39.pyc,,
+pip/_vendor/requests/__pycache__/auth.cpython-39.pyc,,
+pip/_vendor/requests/__pycache__/certs.cpython-39.pyc,,
+pip/_vendor/requests/__pycache__/compat.cpython-39.pyc,,
+pip/_vendor/requests/__pycache__/cookies.cpython-39.pyc,,
+pip/_vendor/requests/__pycache__/exceptions.cpython-39.pyc,,
+pip/_vendor/requests/__pycache__/help.cpython-39.pyc,,
+pip/_vendor/requests/__pycache__/hooks.cpython-39.pyc,,
+pip/_vendor/requests/__pycache__/models.cpython-39.pyc,,
+pip/_vendor/requests/__pycache__/packages.cpython-39.pyc,,
+pip/_vendor/requests/__pycache__/sessions.cpython-39.pyc,,
+pip/_vendor/requests/__pycache__/status_codes.cpython-39.pyc,,
+pip/_vendor/requests/__pycache__/structures.cpython-39.pyc,,
+pip/_vendor/requests/__pycache__/utils.cpython-39.pyc,,
+pip/_vendor/requests/__version__.py,sha256=nJVa3ef2yRyeYMhy7yHnRyjjpnNTDykZsE4Sp9irBC4,440
+pip/_vendor/requests/_internal_utils.py,sha256=aSPlF4uDhtfKxEayZJJ7KkAxtormeTfpwKSBSwtmAUw,1397
+pip/_vendor/requests/adapters.py,sha256=GFEz5koZaMZD86v0SHXKVB5SE9MgslEjkCQzldkNwVM,21443
+pip/_vendor/requests/api.py,sha256=dyvkDd5itC9z2g0wHl_YfD1yf6YwpGWLO7__8e21nks,6377
+pip/_vendor/requests/auth.py,sha256=h-HLlVx9j8rKV5hfSAycP2ApOSglTz77R0tz7qCbbEE,10187
+pip/_vendor/requests/certs.py,sha256=PVPooB0jP5hkZEULSCwC074532UFbR2Ptgu0I5zwmCs,575
+pip/_vendor/requests/compat.py,sha256=IhK9quyX0RRuWTNcg6d2JGSAOUbM6mym2p_2XjLTwf4,1286
+pip/_vendor/requests/cookies.py,sha256=kD3kNEcCj-mxbtf5fJsSaT86eGoEYpD3X0CSgpzl7BM,18560
+pip/_vendor/requests/exceptions.py,sha256=FA-_kVwBZ2jhXauRctN_ewHVK25b-fj0Azyz1THQ0Kk,3823
+pip/_vendor/requests/help.py,sha256=FnAAklv8MGm_qb2UilDQgS6l0cUttiCFKUjx0zn2XNA,3879
+pip/_vendor/requests/hooks.py,sha256=CiuysiHA39V5UfcCBXFIx83IrDpuwfN9RcTUgv28ftQ,733
+pip/_vendor/requests/models.py,sha256=GZRMMrGwDOLVvVfFHLUq0qTfIWDla3NcFHa1f5xs9Q8,35287
+pip/_vendor/requests/packages.py,sha256=njJmVifY4aSctuW3PP5EFRCxjEwMRDO6J_feG2dKWsI,695
+pip/_vendor/requests/sessions.py,sha256=KUqJcRRLovNefUs7ScOXSUVCcfSayTFWtbiJ7gOSlTI,30180
+pip/_vendor/requests/status_codes.py,sha256=FvHmT5uH-_uimtRz5hH9VCbt7VV-Nei2J9upbej6j8g,4235
+pip/_vendor/requests/structures.py,sha256=-IbmhVz06S-5aPSZuUthZ6-6D9XOjRuTXHOabY041XM,2912
+pip/_vendor/requests/utils.py,sha256=0gzSOcx9Ya4liAbHnHuwt4jM78lzCZZoDFgkmsInNUg,33240
+pip/_vendor/resolvelib/__init__.py,sha256=UL-B2BDI0_TRIqkfGwLHKLxY-LjBlomz7941wDqzB1I,537
+pip/_vendor/resolvelib/__pycache__/__init__.cpython-39.pyc,,
+pip/_vendor/resolvelib/__pycache__/providers.cpython-39.pyc,,
+pip/_vendor/resolvelib/__pycache__/reporters.cpython-39.pyc,,
+pip/_vendor/resolvelib/__pycache__/resolvers.cpython-39.pyc,,
+pip/_vendor/resolvelib/__pycache__/structs.cpython-39.pyc,,
+pip/_vendor/resolvelib/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pip/_vendor/resolvelib/compat/__pycache__/__init__.cpython-39.pyc,,
+pip/_vendor/resolvelib/compat/__pycache__/collections_abc.cpython-39.pyc,,
+pip/_vendor/resolvelib/compat/collections_abc.py,sha256=uy8xUZ-NDEw916tugUXm8HgwCGiMO0f-RcdnpkfXfOs,156
+pip/_vendor/resolvelib/providers.py,sha256=roVmFBItQJ0TkhNua65h8LdNny7rmeqVEXZu90QiP4o,5872
+pip/_vendor/resolvelib/reporters.py,sha256=fW91NKf-lK8XN7i6Yd_rczL5QeOT3sc6AKhpaTEnP3E,1583
+pip/_vendor/resolvelib/resolvers.py,sha256=2wYzVGBGerbmcIpH8cFmgSKgLSETz8jmwBMGjCBMHG4,17592
+pip/_vendor/resolvelib/structs.py,sha256=IVIYof6sA_N4ZEiE1C1UhzTX495brCNnyCdgq6CYq28,4794
+pip/_vendor/rich/__init__.py,sha256=zREyQ22R3zKg8gMdhiikczdVQYtZNeayHNrbBg5scm0,5944
+pip/_vendor/rich/__main__.py,sha256=BmTmBWI93ytq75IEPi1uAAdeRYzFfDbgaAXjsX1ogig,8808
+pip/_vendor/rich/__pycache__/__init__.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/__main__.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/_cell_widths.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/_emoji_codes.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/_emoji_replace.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/_export_format.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/_extension.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/_inspect.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/_log_render.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/_loop.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/_palettes.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/_pick.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/_ratio.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/_spinners.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/_stack.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/_timer.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/_win32_console.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/_windows.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/_windows_renderer.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/_wrap.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/abc.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/align.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/ansi.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/bar.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/box.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/cells.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/color.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/color_triplet.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/columns.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/console.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/constrain.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/containers.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/control.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/default_styles.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/diagnose.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/emoji.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/errors.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/file_proxy.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/filesize.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/highlighter.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/json.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/jupyter.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/layout.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/live.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/live_render.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/logging.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/markup.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/measure.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/padding.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/pager.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/palette.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/panel.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/pretty.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/progress.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/progress_bar.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/prompt.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/protocol.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/region.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/repr.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/rule.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/scope.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/screen.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/segment.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/spinner.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/status.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/style.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/styled.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/syntax.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/table.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/terminal_theme.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/text.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/theme.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/themes.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/traceback.cpython-39.pyc,,
+pip/_vendor/rich/__pycache__/tree.cpython-39.pyc,,
+pip/_vendor/rich/_cell_widths.py,sha256=2n4EiJi3X9sqIq0O16kUZ_zy6UYMd3xFfChlKfnW1Hc,10096
+pip/_vendor/rich/_emoji_codes.py,sha256=hu1VL9nbVdppJrVoijVshRlcRRe_v3dju3Mmd2sKZdY,140235
+pip/_vendor/rich/_emoji_replace.py,sha256=n-kcetsEUx2ZUmhQrfeMNc-teeGhpuSQ5F8VPBsyvDo,1064
+pip/_vendor/rich/_export_format.py,sha256=nHArqOljIlYn6NruhWsAsh-fHo7oJC3y9BDJyAa-QYQ,2114
+pip/_vendor/rich/_extension.py,sha256=Xt47QacCKwYruzjDi-gOBq724JReDj9Cm9xUi5fr-34,265
+pip/_vendor/rich/_inspect.py,sha256=oZJGw31e64dwXSCmrDnvZbwVb1ZKhWfU8wI3VWohjJk,9695
+pip/_vendor/rich/_log_render.py,sha256=1ByI0PA1ZpxZY3CGJOK54hjlq4X-Bz_boIjIqCd8Kns,3225
+pip/_vendor/rich/_loop.py,sha256=hV_6CLdoPm0va22Wpw4zKqM0RYsz3TZxXj0PoS-9eDQ,1236
+pip/_vendor/rich/_palettes.py,sha256=cdev1JQKZ0JvlguV9ipHgznTdnvlIzUFDBb0It2PzjI,7063
+pip/_vendor/rich/_pick.py,sha256=evDt8QN4lF5CiwrUIXlOJCntitBCOsI3ZLPEIAVRLJU,423
+pip/_vendor/rich/_ratio.py,sha256=2lLSliL025Y-YMfdfGbutkQDevhcyDqc-DtUYW9mU70,5472
+pip/_vendor/rich/_spinners.py,sha256=U2r1_g_1zSjsjiUdAESc2iAMc3i4ri_S8PYP6kQ5z1I,19919
+pip/_vendor/rich/_stack.py,sha256=-C8OK7rxn3sIUdVwxZBBpeHhIzX0eI-VM3MemYfaXm0,351
+pip/_vendor/rich/_timer.py,sha256=zelxbT6oPFZnNrwWPpc1ktUeAT-Vc4fuFcRZLQGLtMI,417
+pip/_vendor/rich/_win32_console.py,sha256=P0vxI2fcndym1UU1S37XAzQzQnkyY7YqAKmxm24_gug,22820
+pip/_vendor/rich/_windows.py,sha256=dvNl9TmfPzNVxiKk5WDFihErZ5796g2UC9-KGGyfXmk,1926
+pip/_vendor/rich/_windows_renderer.py,sha256=t74ZL3xuDCP3nmTp9pH1L5LiI2cakJuQRQleHCJerlk,2783
+pip/_vendor/rich/_wrap.py,sha256=xfV_9t0Sg6rzimmrDru8fCVmUlalYAcHLDfrJZnbbwQ,1840
+pip/_vendor/rich/abc.py,sha256=ON-E-ZqSSheZ88VrKX2M3PXpFbGEUUZPMa_Af0l-4f0,890
+pip/_vendor/rich/align.py,sha256=FV6_GS-8uhIyViMng3hkIWSFaTgMohK1Oqyjl8I8mGE,10368
+pip/_vendor/rich/ansi.py,sha256=HtaPG7dvgL6_yo0sQmx5CM05DJ4_1goY5SWXXOYNaKs,6820
+pip/_vendor/rich/bar.py,sha256=a7UD303BccRCrEhGjfMElpv5RFYIinaAhAuqYqhUvmw,3264
+pip/_vendor/rich/box.py,sha256=1Iv1sUWqjtp5XwLwGH-AJ8HgyXZ7dRFUkO0z3M_bRl8,9864
+pip/_vendor/rich/cells.py,sha256=zMjFI15wCpgjLR14lHdfFMVC6qMDi5OsKIB0PYZBBMk,4503
+pip/_vendor/rich/color.py,sha256=kp87L8V4-3qayE6CUxtW_nP8Ujfew_-DAhNwYMXBMOY,17957
+pip/_vendor/rich/color_triplet.py,sha256=3lhQkdJbvWPoLDO-AnYImAWmJvV5dlgYNCVZ97ORaN4,1054
+pip/_vendor/rich/columns.py,sha256=HUX0KcMm9dsKNi11fTbiM_h2iDtl8ySCaVcxlalEzq8,7131
+pip/_vendor/rich/console.py,sha256=bTT9DNX03V4cQXefg22d-gLSs_e_ZY2zdCvLIlEyU2Q,95885
+pip/_vendor/rich/constrain.py,sha256=1VIPuC8AgtKWrcncQrjBdYqA3JVWysu6jZo1rrh7c7Q,1288
+pip/_vendor/rich/containers.py,sha256=aKgm5UDHn5Nmui6IJaKdsZhbHClh_X7D-_Wg8Ehrr7s,5497
+pip/_vendor/rich/control.py,sha256=DSkHTUQLorfSERAKE_oTAEUFefZnZp4bQb4q8rHbKws,6630
+pip/_vendor/rich/default_styles.py,sha256=WqVh-RPNEsx0Wxf3fhS_fCn-wVqgJ6Qfo-Zg7CoCsLE,7954
+pip/_vendor/rich/diagnose.py,sha256=an6uouwhKPAlvQhYpNNpGq9EJysfMIOvvCbO3oSoR24,972
+pip/_vendor/rich/emoji.py,sha256=omTF9asaAnsM4yLY94eR_9dgRRSm1lHUszX20D1yYCQ,2501
+pip/_vendor/rich/errors.py,sha256=5pP3Kc5d4QJ_c0KFsxrfyhjiPVe7J1zOqSFbFAzcV-Y,642
+pip/_vendor/rich/file_proxy.py,sha256=4gCbGRXg0rW35Plaf0UVvj3dfENHuzc_n8I_dBqxI7o,1616
+pip/_vendor/rich/filesize.py,sha256=yShoVpARafJBreyZFaAhC4OhnJ6ydC1WXR-Ez4wU_YQ,2507
+pip/_vendor/rich/highlighter.py,sha256=3WW6PACGlq0e3YDjfqiMBQ0dYZwu7pcoFYUgJy01nb0,9585
+pip/_vendor/rich/json.py,sha256=RCm4lXBXrjvXHpqrWPH8wdGP0jEo4IohLmkddlhRY18,5051
+pip/_vendor/rich/jupyter.py,sha256=QyoKoE_8IdCbrtiSHp9TsTSNyTHY0FO5whE7jOTd9UE,3252
+pip/_vendor/rich/layout.py,sha256=E3xJ4fomizUADwime3VA0lBXoMSPl9blEokIzVBjO0Q,14074
+pip/_vendor/rich/live.py,sha256=emVaLUua-FKSYqZXmtJJjBIstO99CqMOuA6vMAKVkO0,14172
+pip/_vendor/rich/live_render.py,sha256=zElm3PrfSIvjOce28zETHMIUf9pFYSUA5o0AflgUP64,3667
+pip/_vendor/rich/logging.py,sha256=10j13lPr-QuYqEEBz_2aRJp8gNYvSN2wmCUlUqJcPLM,11471
+pip/_vendor/rich/markup.py,sha256=xzF4uAafiEeEYDJYt_vUnJOGoTU8RrH-PH7WcWYXjCg,8198
+pip/_vendor/rich/measure.py,sha256=HmrIJX8sWRTHbgh8MxEay_83VkqNW_70s8aKP5ZcYI8,5305
+pip/_vendor/rich/padding.py,sha256=kTFGsdGe0os7tXLnHKpwTI90CXEvrceeZGCshmJy5zw,4970
+pip/_vendor/rich/pager.py,sha256=SO_ETBFKbg3n_AgOzXm41Sv36YxXAyI3_R-KOY2_uSc,828
+pip/_vendor/rich/palette.py,sha256=lInvR1ODDT2f3UZMfL1grq7dY_pDdKHw4bdUgOGaM4Y,3396
+pip/_vendor/rich/panel.py,sha256=CzdojkDAjxAKgvDxis47nWzUh1V2NniOqkJJQajosG8,8744
+pip/_vendor/rich/pretty.py,sha256=CalVLVW3mvTn1hvI9Pgi2v-y4S-5zUWBK-PH7SlVs-U,36576
+pip/_vendor/rich/progress.py,sha256=zjQRwd3TmDnAvSjTPsNPHFjmqE9GOEX3bf0Lj56hIL8,59746
+pip/_vendor/rich/progress_bar.py,sha256=zHHaFPEfIhW2fq6Fnl5vBY7AUpP1N0HVGElISUHsnqw,8161
+pip/_vendor/rich/prompt.py,sha256=x0mW-pIPodJM4ry6grgmmLrl8VZp99kqcmdnBe70YYA,11303
+pip/_vendor/rich/protocol.py,sha256=5hHHDDNHckdk8iWH5zEbi-zuIVSF5hbU2jIo47R7lTE,1391
+pip/_vendor/rich/region.py,sha256=rNT9xZrVZTYIXZC0NYn41CJQwYNbR-KecPOxTgQvB8Y,166
+pip/_vendor/rich/repr.py,sha256=Je91CIrZN_av9L3FRCKCs5yoX2LvczrCNKqUbVsjUvQ,4449
+pip/_vendor/rich/rule.py,sha256=V6AWI0wCb6DB0rvN967FRMlQrdlG7HoZdfEAHyeG8CM,4773
+pip/_vendor/rich/scope.py,sha256=HX13XsJfqzQHpPfw4Jn9JmJjCsRj9uhHxXQEqjkwyLA,2842
+pip/_vendor/rich/screen.py,sha256=YoeReESUhx74grqb0mSSb9lghhysWmFHYhsbMVQjXO8,1591
+pip/_vendor/rich/segment.py,sha256=6XdX0MfL18tUCaUWDWncIqx0wpq3GiaqzhYP779JvRA,24224
+pip/_vendor/rich/spinner.py,sha256=7b8MCleS4fa46HX0AzF98zfu6ZM6fAL0UgYzPOoakF4,4374
+pip/_vendor/rich/status.py,sha256=gJsIXIZeSo3urOyxRUjs6VrhX5CZrA0NxIQ-dxhCnwo,4425
+pip/_vendor/rich/style.py,sha256=4WnUEkHNMp9Tfmd8cmbxWGby7QeTk2LUTQzFSs46EQc,26240
+pip/_vendor/rich/styled.py,sha256=eZNnzGrI4ki_54pgY3Oj0T-x3lxdXTYh4_ryDB24wBU,1258
+pip/_vendor/rich/syntax.py,sha256=_M08KbE11nNWNBPooFLKAA7lWkThPzlGUsuesxQYsuA,34697
+pip/_vendor/rich/table.py,sha256=r_lahmj45cINCWLYaIjq9yEv3gve8E6bkYTP8NDqApE,39515
+pip/_vendor/rich/terminal_theme.py,sha256=1j5-ufJfnvlAo5Qsi_ACZiXDmwMXzqgmFByObT9-yJY,3370
+pip/_vendor/rich/text.py,sha256=oajdGIeHcLcSdOwbC48_20ylDsHAS5fsPZD_Ih0clyA,44666
+pip/_vendor/rich/theme.py,sha256=GKNtQhDBZKAzDaY0vQVQQFzbc0uWfFe6CJXA-syT7zQ,3627
+pip/_vendor/rich/themes.py,sha256=0xgTLozfabebYtcJtDdC5QkX5IVUEaviqDUJJh4YVFk,102
+pip/_vendor/rich/traceback.py,sha256=MORQpXH7AvhAAThW8oIbtwffXb8M6XRkSkcJ52JuA3g,26060
+pip/_vendor/rich/tree.py,sha256=BMbUYNjS9uodNPfvtY_odmU09GA5QzcMbQ5cJZhllQI,9169
+pip/_vendor/six.py,sha256=TOOfQi7nFGfMrIvtdr6wX4wyHH8M7aknmuLfo2cBBrM,34549
+pip/_vendor/tenacity/__init__.py,sha256=rjcWJVq5PcNJNC42rt-TAGGskM-RUEkZbDKu1ra7IPo,18364
+pip/_vendor/tenacity/__pycache__/__init__.cpython-39.pyc,,
+pip/_vendor/tenacity/__pycache__/_asyncio.cpython-39.pyc,,
+pip/_vendor/tenacity/__pycache__/_utils.cpython-39.pyc,,
+pip/_vendor/tenacity/__pycache__/after.cpython-39.pyc,,
+pip/_vendor/tenacity/__pycache__/before.cpython-39.pyc,,
+pip/_vendor/tenacity/__pycache__/before_sleep.cpython-39.pyc,,
+pip/_vendor/tenacity/__pycache__/nap.cpython-39.pyc,,
+pip/_vendor/tenacity/__pycache__/retry.cpython-39.pyc,,
+pip/_vendor/tenacity/__pycache__/stop.cpython-39.pyc,,
+pip/_vendor/tenacity/__pycache__/tornadoweb.cpython-39.pyc,,
+pip/_vendor/tenacity/__pycache__/wait.cpython-39.pyc,,
+pip/_vendor/tenacity/_asyncio.py,sha256=HEb0BVJEeBJE9P-m9XBxh1KcaF96BwoeqkJCL5sbVcQ,3314
+pip/_vendor/tenacity/_utils.py,sha256=-y68scDcyoqvTJuJJ0GTfjdSCljEYlbCYvgk7nM4NdM,1944
+pip/_vendor/tenacity/after.py,sha256=dlmyxxFy2uqpLXDr838DiEd7jgv2AGthsWHGYcGYsaI,1496
+pip/_vendor/tenacity/before.py,sha256=7XtvRmO0dRWUp8SVn24OvIiGFj8-4OP5muQRUiWgLh0,1376
+pip/_vendor/tenacity/before_sleep.py,sha256=ThyDvqKU5yle_IvYQz_b6Tp6UjUS0PhVp6zgqYl9U6Y,1908
+pip/_vendor/tenacity/nap.py,sha256=fRWvnz1aIzbIq9Ap3gAkAZgDH6oo5zxMrU6ZOVByq0I,1383
+pip/_vendor/tenacity/retry.py,sha256=Cy504Ss3UrRV7lnYgvymF66WD1wJ2dbM869kDcjuDes,7550
+pip/_vendor/tenacity/stop.py,sha256=sKHmHaoSaW6sKu3dTxUVKr1-stVkY7lw4Y9yjZU30zQ,2790
+pip/_vendor/tenacity/tornadoweb.py,sha256=E8lWO2nwe6dJgoB-N2HhQprYLDLB_UdSgFnv-EN6wKE,2145
+pip/_vendor/tenacity/wait.py,sha256=tdLTESRm5E237VHG0SxCDXRa0DHKPKVq285kslHVURc,8011
+pip/_vendor/tomli/__init__.py,sha256=JhUwV66DB1g4Hvt1UQCVMdfCu-IgAV8FXmvDU9onxd4,396
+pip/_vendor/tomli/__pycache__/__init__.cpython-39.pyc,,
+pip/_vendor/tomli/__pycache__/_parser.cpython-39.pyc,,
+pip/_vendor/tomli/__pycache__/_re.cpython-39.pyc,,
+pip/_vendor/tomli/__pycache__/_types.cpython-39.pyc,,
+pip/_vendor/tomli/_parser.py,sha256=g9-ENaALS-B8dokYpCuzUFalWlog7T-SIYMjLZSWrtM,22633
+pip/_vendor/tomli/_re.py,sha256=dbjg5ChZT23Ka9z9DHOXfdtSpPwUfdgMXnj8NOoly-w,2943
+pip/_vendor/tomli/_types.py,sha256=-GTG2VUqkpxwMqzmVO4F7ybKddIbAnuAHXfmWQcTi3Q,254
+pip/_vendor/typing_extensions.py,sha256=VKZ_nHsuzDbKOVUY2CTdavwBgfZ2EXRyluZHRzUYAbg,80114
+pip/_vendor/urllib3/__init__.py,sha256=iXLcYiJySn0GNbWOOZDDApgBL1JgP44EZ8i1760S8Mc,3333
+pip/_vendor/urllib3/__pycache__/__init__.cpython-39.pyc,,
+pip/_vendor/urllib3/__pycache__/_collections.cpython-39.pyc,,
+pip/_vendor/urllib3/__pycache__/_version.cpython-39.pyc,,
+pip/_vendor/urllib3/__pycache__/connection.cpython-39.pyc,,
+pip/_vendor/urllib3/__pycache__/connectionpool.cpython-39.pyc,,
+pip/_vendor/urllib3/__pycache__/exceptions.cpython-39.pyc,,
+pip/_vendor/urllib3/__pycache__/fields.cpython-39.pyc,,
+pip/_vendor/urllib3/__pycache__/filepost.cpython-39.pyc,,
+pip/_vendor/urllib3/__pycache__/poolmanager.cpython-39.pyc,,
+pip/_vendor/urllib3/__pycache__/request.cpython-39.pyc,,
+pip/_vendor/urllib3/__pycache__/response.cpython-39.pyc,,
+pip/_vendor/urllib3/_collections.py,sha256=Rp1mVyBgc_UlAcp6M3at1skJBXR5J43NawRTvW2g_XY,10811
+pip/_vendor/urllib3/_version.py,sha256=GhuGBUT_MtRxHEHDb-LYs5yLPeYWlCwFBPjGZmVJbVg,64
+pip/_vendor/urllib3/connection.py,sha256=8976wL6sGeVMW0JnXvx5mD00yXu87uQjxtB9_VL8dx8,20070
+pip/_vendor/urllib3/connectionpool.py,sha256=vEzk1iJEw1qR2vHBo7m3Y98iDfna6rKkUz3AyK5lJKQ,39093
+pip/_vendor/urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-39.pyc,,
+pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-39.pyc,,
+pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-39.pyc,,
+pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-39.pyc,,
+pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-39.pyc,,
+pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-39.pyc,,
+pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-39.pyc,,
+pip/_vendor/urllib3/contrib/_appengine_environ.py,sha256=bDbyOEhW2CKLJcQqAKAyrEHN-aklsyHFKq6vF8ZFsmk,957
+pip/_vendor/urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-39.pyc,,
+pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-39.pyc,,
+pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-39.pyc,,
+pip/_vendor/urllib3/contrib/_securetransport/bindings.py,sha256=4Xk64qIkPBt09A5q-RIFUuDhNc9mXilVapm7WnYnzRw,17632
+pip/_vendor/urllib3/contrib/_securetransport/low_level.py,sha256=B2JBB2_NRP02xK6DCa1Pa9IuxrPwxzDzZbixQkb7U9M,13922
+pip/_vendor/urllib3/contrib/appengine.py,sha256=lfzpHFmJiO82shClLEm3QB62SYgHWnjpZOH_2JhU5Tc,11034
+pip/_vendor/urllib3/contrib/ntlmpool.py,sha256=ej9gGvfAb2Gt00lafFp45SIoRz-QwrQ4WChm6gQmAlM,4538
+pip/_vendor/urllib3/contrib/pyopenssl.py,sha256=rt9NEIP8iMBLxxRhH0jLnmshW-OFP83jEayxMSqu2MU,17182
+pip/_vendor/urllib3/contrib/securetransport.py,sha256=yhZdmVjY6PI6EeFbp7qYOp6-vp1Rkv2NMuOGaEj7pmc,34448
+pip/_vendor/urllib3/contrib/socks.py,sha256=aRi9eWXo9ZEb95XUxef4Z21CFlnnjbEiAo9HOseoMt4,7097
+pip/_vendor/urllib3/exceptions.py,sha256=0Mnno3KHTNfXRfY7638NufOPkUb6mXOm-Lqj-4x2w8A,8217
+pip/_vendor/urllib3/fields.py,sha256=kvLDCg_JmH1lLjUUEY_FLS8UhY7hBvDPuVETbY8mdrM,8579
+pip/_vendor/urllib3/filepost.py,sha256=5b_qqgRHVlL7uLtdAYBzBh-GHmU5AfJVt_2N0XS3PeY,2440
+pip/_vendor/urllib3/packages/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-39.pyc,,
+pip/_vendor/urllib3/packages/__pycache__/six.cpython-39.pyc,,
+pip/_vendor/urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-39.pyc,,
+pip/_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-39.pyc,,
+pip/_vendor/urllib3/packages/backports/makefile.py,sha256=nbzt3i0agPVP07jqqgjhaYjMmuAi_W5E0EywZivVO8E,1417
+pip/_vendor/urllib3/packages/six.py,sha256=b9LM0wBXv7E7SrbCjAm4wwN-hrH-iNxv18LgWNMMKPo,34665
+pip/_vendor/urllib3/poolmanager.py,sha256=0KOOJECoeLYVjUHvv-0h4Oq3FFQQ2yb-Fnjkbj8gJO0,19786
+pip/_vendor/urllib3/request.py,sha256=ZFSIqX0C6WizixecChZ3_okyu7BEv0lZu1VT0s6h4SM,5985
+pip/_vendor/urllib3/response.py,sha256=p3VBYPhwBca77wCZfmoXvEDVVC3SdF7yxQ6TXuxy1BI,30109
+pip/_vendor/urllib3/util/__init__.py,sha256=JEmSmmqqLyaw8P51gUImZh8Gwg9i1zSe-DoqAitn2nc,1155
+pip/_vendor/urllib3/util/__pycache__/__init__.cpython-39.pyc,,
+pip/_vendor/urllib3/util/__pycache__/connection.cpython-39.pyc,,
+pip/_vendor/urllib3/util/__pycache__/proxy.cpython-39.pyc,,
+pip/_vendor/urllib3/util/__pycache__/queue.cpython-39.pyc,,
+pip/_vendor/urllib3/util/__pycache__/request.cpython-39.pyc,,
+pip/_vendor/urllib3/util/__pycache__/response.cpython-39.pyc,,
+pip/_vendor/urllib3/util/__pycache__/retry.cpython-39.pyc,,
+pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-39.pyc,,
+pip/_vendor/urllib3/util/__pycache__/ssl_match_hostname.cpython-39.pyc,,
+pip/_vendor/urllib3/util/__pycache__/ssltransport.cpython-39.pyc,,
+pip/_vendor/urllib3/util/__pycache__/timeout.cpython-39.pyc,,
+pip/_vendor/urllib3/util/__pycache__/url.cpython-39.pyc,,
+pip/_vendor/urllib3/util/__pycache__/wait.cpython-39.pyc,,
+pip/_vendor/urllib3/util/connection.py,sha256=5Lx2B1PW29KxBn2T0xkN1CBgRBa3gGVJBKoQoRogEVk,4901
+pip/_vendor/urllib3/util/proxy.py,sha256=zUvPPCJrp6dOF0N4GAVbOcl6o-4uXKSrGiTkkr5vUS4,1605
+pip/_vendor/urllib3/util/queue.py,sha256=nRgX8_eX-_VkvxoX096QWoz8Ps0QHUAExILCY_7PncM,498
+pip/_vendor/urllib3/util/request.py,sha256=C0OUt2tcU6LRiQJ7YYNP9GvPrSvl7ziIBekQ-5nlBZk,3997
+pip/_vendor/urllib3/util/response.py,sha256=GJpg3Egi9qaJXRwBh5wv-MNuRWan5BIu40oReoxWP28,3510
+pip/_vendor/urllib3/util/retry.py,sha256=iESg2PvViNdXBRY4MpL4h0kqwOOkHkxmLn1kkhFHPU8,22001
+pip/_vendor/urllib3/util/ssl_.py,sha256=X4-AqW91aYPhPx6-xbf66yHFQKbqqfC_5Zt4WkLX1Hc,17177
+pip/_vendor/urllib3/util/ssl_match_hostname.py,sha256=Ir4cZVEjmAk8gUAIHWSi7wtOO83UCYABY2xFD1Ql_WA,5758
+pip/_vendor/urllib3/util/ssltransport.py,sha256=NA-u5rMTrDFDFC8QzRKUEKMG0561hOD4qBTr3Z4pv6E,6895
+pip/_vendor/urllib3/util/timeout.py,sha256=QSbBUNOB9yh6AnDn61SrLQ0hg5oz0I9-uXEG91AJuIg,10003
+pip/_vendor/urllib3/util/url.py,sha256=49HwObaTUUjqVe4qvSUvIjZyf3ghgNA6-OLm3kmkFKM,14287
+pip/_vendor/urllib3/util/wait.py,sha256=fOX0_faozG2P7iVojQoE1mbydweNyTcm-hXEfFrTtLI,5403
+pip/_vendor/vendor.txt,sha256=07gLL_CcEHdl1XM0g4PH2L4gsTTMlJr8WWIC11yEyMo,469
+pip/_vendor/webencodings/__init__.py,sha256=qOBJIuPy_4ByYH6W_bNgJF-qYQ2DoU-dKsDu5yRWCXg,10579
+pip/_vendor/webencodings/__pycache__/__init__.cpython-39.pyc,,
+pip/_vendor/webencodings/__pycache__/labels.cpython-39.pyc,,
+pip/_vendor/webencodings/__pycache__/mklabels.cpython-39.pyc,,
+pip/_vendor/webencodings/__pycache__/tests.cpython-39.pyc,,
+pip/_vendor/webencodings/__pycache__/x_user_defined.cpython-39.pyc,,
+pip/_vendor/webencodings/labels.py,sha256=4AO_KxTddqGtrL9ns7kAPjb0CcN6xsCIxbK37HY9r3E,8979
+pip/_vendor/webencodings/mklabels.py,sha256=GYIeywnpaLnP0GSic8LFWgd0UVvO_l1Nc6YoF-87R_4,1305
+pip/_vendor/webencodings/tests.py,sha256=OtGLyjhNY1fvkW1GvLJ_FV9ZoqC9Anyjr7q3kxTbzNs,6563
+pip/_vendor/webencodings/x_user_defined.py,sha256=yOqWSdmpytGfUgh_Z6JYgDNhoc-BAHyyeeT15Fr42tM,4307
+pip/py.typed,sha256=EBVvvPRTn_eIpz5e5QztSCdrMX7Qwd7VP93RSoIlZ2I,286
diff --git a/venv/lib/python3.9/site-packages/pip-22.3.1.dist-info/REQUESTED b/venv/lib/python3.9/site-packages/pip-22.3.1.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip-22.3.1.dist-info/REQUESTED
diff --git a/venv/lib/python3.9/site-packages/pip-22.3.1.dist-info/WHEEL b/venv/lib/python3.9/site-packages/pip-22.3.1.dist-info/WHEEL
new file mode 100644
index 0000000..becc9a6
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip-22.3.1.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.1)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/venv/lib/python3.9/site-packages/pip-22.3.1.dist-info/entry_points.txt b/venv/lib/python3.9/site-packages/pip-22.3.1.dist-info/entry_points.txt
new file mode 100644
index 0000000..5367846
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip-22.3.1.dist-info/entry_points.txt
@@ -0,0 +1,4 @@
+[console_scripts]
+pip = pip._internal.cli.main:main
+pip3 = pip._internal.cli.main:main
+pip3.10 = pip._internal.cli.main:main
diff --git a/venv/lib/python3.9/site-packages/pip-22.3.1.dist-info/top_level.txt b/venv/lib/python3.9/site-packages/pip-22.3.1.dist-info/top_level.txt
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip-22.3.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+pip
diff --git a/venv/lib/python3.9/site-packages/pip/__init__.py b/venv/lib/python3.9/site-packages/pip/__init__.py
new file mode 100644
index 0000000..5563b5d
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/__init__.py
@@ -0,0 +1,13 @@
+from typing import List, Optional
+
+__version__ = "22.3.1"
+
+
+def main(args: Optional[List[str]] = None) -> int:
+    """This is an internal API only meant for use by pip's own console scripts.
+
+    For additional details, see https://github.com/pypa/pip/issues/7498.
+    """
+    from pip._internal.utils.entrypoints import _wrapper
+
+    return _wrapper(args)
diff --git a/venv/lib/python3.9/site-packages/pip/__main__.py b/venv/lib/python3.9/site-packages/pip/__main__.py
new file mode 100644
index 0000000..fe34a7b
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/__main__.py
@@ -0,0 +1,31 @@
+import os
+import sys
+import warnings
+
+# Remove '' and the current working directory from the first entry of
+# sys.path, if present, to avoid using the current directory in the pip
+# commands check, freeze, install, list and show, when invoked as
+# python -m pip <command>.
+if sys.path[0] in ("", os.getcwd()):
+    sys.path.pop(0)
+
+# If we are running from a wheel, add the wheel to sys.path
+# This allows the usage: python pip-*.whl/pip install pip-*.whl
+if __package__ == "":
+    # __file__ is pip-*.whl/pip/__main__.py
+    # first dirname call strips off '/__main__.py', second strips off '/pip'
+    # Resulting path is the name of the wheel itself
+    # Add that to sys.path so we can import pip
+    path = os.path.dirname(os.path.dirname(__file__))
+    sys.path.insert(0, path)
+
+if __name__ == "__main__":
+    # Work around the error reported in #9540, pending a proper fix.
+    # Note: It is essential the warning filter is set *before* importing
+    #       pip, as the deprecation happens at import time, not runtime.
+    warnings.filterwarnings(
+        "ignore", category=DeprecationWarning, module=".*packaging\\.version"
+    )
+    from pip._internal.cli.main import main as _main
+
+    sys.exit(_main())
diff --git a/venv/lib/python3.9/site-packages/pip/__pip-runner__.py b/venv/lib/python3.9/site-packages/pip/__pip-runner__.py
new file mode 100644
index 0000000..49a148a
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/__pip-runner__.py
@@ -0,0 +1,50 @@
+"""Execute exactly this copy of pip, within a different environment.
+
+This file is named as it is, to ensure that this module can't be imported via
+an import statement.
+"""
+
+# /!\ This version compatibility check section must be Python 2 compatible. /!\
+
+import sys
+
+# Copied from setup.py
+PYTHON_REQUIRES = (3, 7)
+
+
+def version_str(version):  # type: ignore
+    return ".".join(str(v) for v in version)
+
+
+if sys.version_info[:2] < PYTHON_REQUIRES:
+    raise SystemExit(
+        "This version of pip does not support python {} (requires >={}).".format(
+            version_str(sys.version_info[:2]), version_str(PYTHON_REQUIRES)
+        )
+    )
+
+# From here on, we can use Python 3 features, but the syntax must remain
+# Python 2 compatible.
+
+import runpy  # noqa: E402
+from importlib.machinery import PathFinder  # noqa: E402
+from os.path import dirname  # noqa: E402
+
+PIP_SOURCES_ROOT = dirname(dirname(__file__))
+
+
+class PipImportRedirectingFinder:
+    @classmethod
+    def find_spec(self, fullname, path=None, target=None):  # type: ignore
+        if fullname != "pip":
+            return None
+
+        spec = PathFinder.find_spec(fullname, [PIP_SOURCES_ROOT], target)
+        assert spec, (PIP_SOURCES_ROOT, fullname)
+        return spec
+
+
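+# Registering the finder at the front of sys.meta_path means it is consulted
+# before the default path-based finders, so "import pip" below resolves to
+# this source tree rather than whatever pip the target environment has.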
+sys.meta_path.insert(0, PipImportRedirectingFinder())
+
+assert __name__ == "__main__", "Cannot run __pip-runner__.py as a non-main module"
+runpy.run_module("pip", run_name="__main__", alter_sys=True)
diff --git a/venv/lib/python3.9/site-packages/pip/__pycache__/__init__.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..0e26201
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/__pycache__/__init__.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/__pycache__/__main__.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/__pycache__/__main__.cpython-39.pyc
new file mode 100644
index 0000000..d474dfb
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/__pycache__/__main__.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/__pycache__/__pip-runner__.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/__pycache__/__pip-runner__.cpython-39.pyc
new file mode 100644
index 0000000..dff8808
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/__pycache__/__pip-runner__.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/__init__.py b/venv/lib/python3.9/site-packages/pip/_internal/__init__.py
new file mode 100644
index 0000000..6afb5c6
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/__init__.py
@@ -0,0 +1,19 @@
+from typing import List, Optional
+
+import pip._internal.utils.inject_securetransport  # noqa
+from pip._internal.utils import _log
+
+# init_logging() must be called before any call to logging.getLogger()
+# which happens at import of most modules.
+_log.init_logging()
+
+
+def main(args: Optional[List[str]] = None) -> int:
+    """This is preserved for old console scripts that may still be referencing
+    it.
+
+    For additional details, see https://github.com/pypa/pip/issues/7498.
+    """
+    from pip._internal.utils.entrypoints import _wrapper
+
+    return _wrapper(args)
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/__pycache__/__init__.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..10f5e29
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/__pycache__/__init__.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/__pycache__/build_env.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/__pycache__/build_env.cpython-39.pyc
new file mode 100644
index 0000000..0a2196f
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/__pycache__/build_env.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/__pycache__/cache.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/__pycache__/cache.cpython-39.pyc
new file mode 100644
index 0000000..b471dd6
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/__pycache__/cache.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/__pycache__/configuration.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/__pycache__/configuration.cpython-39.pyc
new file mode 100644
index 0000000..ca24c02
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/__pycache__/configuration.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/__pycache__/exceptions.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/__pycache__/exceptions.cpython-39.pyc
new file mode 100644
index 0000000..7dcca48
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/__pycache__/exceptions.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/__pycache__/main.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/__pycache__/main.cpython-39.pyc
new file mode 100644
index 0000000..456b0aa
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/__pycache__/main.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/__pycache__/pyproject.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/__pycache__/pyproject.cpython-39.pyc
new file mode 100644
index 0000000..01eee74
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/__pycache__/pyproject.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-39.pyc
new file mode 100644
index 0000000..fa6ead0
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-39.pyc
new file mode 100644
index 0000000..e5519ca
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/build_env.py b/venv/lib/python3.9/site-packages/pip/_internal/build_env.py
new file mode 100644
index 0000000..cc2b38b
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/build_env.py
@@ -0,0 +1,310 @@
+"""Build Environment used for isolation during sdist building
+"""
+
+import logging
+import os
+import pathlib
+import site
+import sys
+import textwrap
+from collections import OrderedDict
+from sysconfig import get_paths
+from types import TracebackType
+from typing import TYPE_CHECKING, Iterable, List, Optional, Set, Tuple, Type
+
+from pip._vendor.certifi import where
+from pip._vendor.packaging.requirements import Requirement
+from pip._vendor.packaging.version import Version
+
+from pip import __file__ as pip_location
+from pip._internal.cli.spinners import open_spinner
+from pip._internal.locations import get_platlib, get_prefixed_libs, get_purelib
+from pip._internal.metadata import get_default_environment, get_environment
+from pip._internal.utils.subprocess import call_subprocess
+from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
+
+if TYPE_CHECKING:
+    from pip._internal.index.package_finder import PackageFinder
+
+logger = logging.getLogger(__name__)
+
+
+class _Prefix:
+    def __init__(self, path: str) -> None:
+        self.path = path
+        self.setup = False
+        self.bin_dir = get_paths(
+            "nt" if os.name == "nt" else "posix_prefix",
+            vars={"base": path, "platbase": path},
+        )["scripts"]
+        self.lib_dirs = get_prefixed_libs(path)
+
+
+def get_runnable_pip() -> str:
+    """Get a file to pass to a Python executable, to run the currently-running pip.
+
+    This is used to run a pip subprocess, for installing requirements into the build
+    environment.
+    """
+    source = pathlib.Path(pip_location).resolve().parent
+
+    if not source.is_dir():
+        # This would happen if someone is using pip from inside a zip file. In that
+        # case, we can use that directly.
+        return str(source)
+
+    return os.fsdecode(source / "__pip-runner__.py")
+
+
+def _get_system_sitepackages() -> Set[str]:
+    """Get system site packages
+
+    Usually from site.getsitepackages,
+    but falls back on `get_purelib()/get_platlib()` if unavailable
+    (e.g. in a virtualenv created by virtualenv<20)
+
+    Returns normalized set of strings.
+    """
+    if hasattr(site, "getsitepackages"):
+        system_sites = site.getsitepackages()
+    else:
+        # virtualenv < 20 overwrites site.py without getsitepackages
+        # fallback on get_purelib/get_platlib.
+        # this is known to miss things, but shouldn't in the cases
+        # where getsitepackages() has been removed (inside a virtualenv)
+        system_sites = [get_purelib(), get_platlib()]
+    return {os.path.normcase(path) for path in system_sites}
+
+
+class BuildEnvironment:
+    """Creates and manages an isolated environment to install build deps"""
+
+    def __init__(self) -> None:
+        temp_dir = TempDirectory(kind=tempdir_kinds.BUILD_ENV, globally_managed=True)
+
+        self._prefixes = OrderedDict(
+            (name, _Prefix(os.path.join(temp_dir.path, name)))
+            for name in ("normal", "overlay")
+        )
+
+        self._bin_dirs: List[str] = []
+        self._lib_dirs: List[str] = []
+        for prefix in reversed(list(self._prefixes.values())):
+            self._bin_dirs.append(prefix.bin_dir)
+            self._lib_dirs.extend(prefix.lib_dirs)
+
+        # Customize site to:
+        # - ensure .pth files are honored
+        # - prevent access to system site packages
+        system_sites = _get_system_sitepackages()
+
+        self._site_dir = os.path.join(temp_dir.path, "site")
+        if not os.path.exists(self._site_dir):
+            os.mkdir(self._site_dir)
+        with open(
+            os.path.join(self._site_dir, "sitecustomize.py"), "w", encoding="utf-8"
+        ) as fp:
+            fp.write(
+                textwrap.dedent(
+                    """
+                import os, site, sys
+
+                # First, drop system-sites related paths.
+                original_sys_path = sys.path[:]
+                known_paths = set()
+                for path in {system_sites!r}:
+                    site.addsitedir(path, known_paths=known_paths)
+                system_paths = set(
+                    os.path.normcase(path)
+                    for path in sys.path[len(original_sys_path):]
+                )
+                original_sys_path = [
+                    path for path in original_sys_path
+                    if os.path.normcase(path) not in system_paths
+                ]
+                sys.path = original_sys_path
+
+                # Second, add lib directories.
+                # ensuring .pth files are processed.
+                for path in {lib_dirs!r}:
+                    assert path not in sys.path
+                    site.addsitedir(path)
+                """
+                ).format(system_sites=system_sites, lib_dirs=self._lib_dirs)
+            )
+
+    def __enter__(self) -> None:
+        self._save_env = {
+            name: os.environ.get(name, None)
+            for name in ("PATH", "PYTHONNOUSERSITE", "PYTHONPATH")
+        }
+
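+        # Prepend the isolated prefixes' bin dirs to PATH, and point PYTHONPATH
+        # at the generated site dir so its sitecustomize.py (written above)
+        # drops system site-packages and registers the isolated lib dirs.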
+        path = self._bin_dirs[:]
+        old_path = self._save_env["PATH"]
+        if old_path:
+            path.extend(old_path.split(os.pathsep))
+
+        pythonpath = [self._site_dir]
+
+        os.environ.update(
+            {
+                "PATH": os.pathsep.join(path),
+                "PYTHONNOUSERSITE": "1",
+                "PYTHONPATH": os.pathsep.join(pythonpath),
+            }
+        )
+
+    def __exit__(
+        self,
+        exc_type: Optional[Type[BaseException]],
+        exc_val: Optional[BaseException],
+        exc_tb: Optional[TracebackType],
+    ) -> None:
+        for varname, old_value in self._save_env.items():
+            if old_value is None:
+                os.environ.pop(varname, None)
+            else:
+                os.environ[varname] = old_value
+
+    def check_requirements(
+        self, reqs: Iterable[str]
+    ) -> Tuple[Set[Tuple[str, str]], Set[str]]:
+        """Return 2 sets:
+        - conflicting requirements: set of (installed, wanted) reqs tuples
+        - missing requirements: set of reqs
+        """
+        missing = set()
+        conflicting = set()
+        if reqs:
+            env = (
+                get_environment(self._lib_dirs)
+                if hasattr(self, "_lib_dirs")
+                else get_default_environment()
+            )
+            for req_str in reqs:
+                req = Requirement(req_str)
+                # We're explicitly evaluating with an empty extra value, since build
+                # environments are not provided any mechanism to select specific extras.
+                if req.marker is not None and not req.marker.evaluate({"extra": ""}):
+                    continue
+                dist = env.get_distribution(req.name)
+                if not dist:
+                    missing.add(req_str)
+                    continue
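+                # PEP 440 versions can be pinned with ==; non-standard (legacy)
+                # versions need the === arbitrary-equality clause instead.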
+                if isinstance(dist.version, Version):
+                    installed_req_str = f"{req.name}=={dist.version}"
+                else:
+                    installed_req_str = f"{req.name}==={dist.version}"
+                if not req.specifier.contains(dist.version, prereleases=True):
+                    conflicting.add((installed_req_str, req_str))
+                # FIXME: Consider direct URL?
+        return conflicting, missing
+
+    def install_requirements(
+        self,
+        finder: "PackageFinder",
+        requirements: Iterable[str],
+        prefix_as_string: str,
+        *,
+        kind: str,
+    ) -> None:
+        prefix = self._prefixes[prefix_as_string]
+        assert not prefix.setup
+        prefix.setup = True
+        if not requirements:
+            return
+        self._install_requirements(
+            get_runnable_pip(),
+            finder,
+            requirements,
+            prefix,
+            kind=kind,
+        )
+
+    @staticmethod
+    def _install_requirements(
+        pip_runnable: str,
+        finder: "PackageFinder",
+        requirements: Iterable[str],
+        prefix: _Prefix,
+        *,
+        kind: str,
+    ) -> None:
+        args: List[str] = [
+            sys.executable,
+            pip_runnable,
+            "install",
+            "--ignore-installed",
+            "--no-user",
+            "--prefix",
+            prefix.path,
+            "--no-warn-script-location",
+        ]
+        if logger.getEffectiveLevel() <= logging.DEBUG:
+            args.append("-v")
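+        # Propagate the parent pip's --no-binary/--only-binary restrictions to
+        # the child pip so the build environment honors the same format rules.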
+        for format_control in ("no_binary", "only_binary"):
+            formats = getattr(finder.format_control, format_control)
+            args.extend(
+                (
+                    "--" + format_control.replace("_", "-"),
+                    ",".join(sorted(formats or {":none:"})),
+                )
+            )
+
+        index_urls = finder.index_urls
+        if index_urls:
+            args.extend(["-i", index_urls[0]])
+            for extra_index in index_urls[1:]:
+                args.extend(["--extra-index-url", extra_index])
+        else:
+            args.append("--no-index")
+        for link in finder.find_links:
+            args.extend(["--find-links", link])
+
+        for host in finder.trusted_hosts:
+            args.extend(["--trusted-host", host])
+        if finder.allow_all_prereleases:
+            args.append("--pre")
+        if finder.prefer_binary:
+            args.append("--prefer-binary")
+        args.append("--")
+        args.extend(requirements)
+        extra_environ = {"_PIP_STANDALONE_CERT": where()}
+        with open_spinner(f"Installing {kind}") as spinner:
+            call_subprocess(
+                args,
+                command_desc=f"pip subprocess to install {kind}",
+                spinner=spinner,
+                extra_environ=extra_environ,
+            )
+
+
+class NoOpBuildEnvironment(BuildEnvironment):
+    """A no-op drop-in replacement for BuildEnvironment"""
+
+    def __init__(self) -> None:
+        pass
+
+    def __enter__(self) -> None:
+        pass
+
+    def __exit__(
+        self,
+        exc_type: Optional[Type[BaseException]],
+        exc_val: Optional[BaseException],
+        exc_tb: Optional[TracebackType],
+    ) -> None:
+        pass
+
+    def cleanup(self) -> None:
+        pass
+
+    def install_requirements(
+        self,
+        finder: "PackageFinder",
+        requirements: Iterable[str],
+        prefix_as_string: str,
+        *,
+        kind: str,
+    ) -> None:
+        raise NotImplementedError()
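
Taken together, the two methods above split the work: check_requirements classifies each build requirement as satisfied, missing, or conflicting, and install_requirements shells out to a pinned copy of pip to install into the isolated prefix. A minimal sketch of the same classification logic, assuming the standalone packaging library rather than pip's vendored copy, and a plain name-to-version dict standing in for the installed-distribution environment:

    from typing import Dict, Iterable, Set, Tuple

    from packaging.requirements import Requirement
    from packaging.version import Version

    def classify(
        installed: Dict[str, Version], reqs: Iterable[str]
    ) -> Tuple[Set[Tuple[str, str]], Set[str]]:
        """Mirror check_requirements() over a plain name -> version map."""
        conflicting: Set[Tuple[str, str]] = set()
        missing: Set[str] = set()
        for req_str in reqs:
            req = Requirement(req_str)
            # Evaluate markers with an empty extra, as the method above does.
            if req.marker is not None and not req.marker.evaluate({"extra": ""}):
                continue
            version = installed.get(req.name)
            if version is None:
                missing.add(req_str)
            elif not req.specifier.contains(version, prereleases=True):
                conflicting.add((f"{req.name}=={version}", req_str))
        return conflicting, missing

    print(classify({"wheel": Version("0.37.1")}, ["wheel>=0.38", "setuptools>=40"]))
    # ({('wheel==0.37.1', 'wheel>=0.38')}, {'setuptools>=40'})
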
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/cache.py b/venv/lib/python3.9/site-packages/pip/_internal/cache.py
new file mode 100644
index 0000000..c53b7f0
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/cache.py
@@ -0,0 +1,293 @@
+"""Cache Management
+"""
+
+import hashlib
+import json
+import logging
+import os
+from pathlib import Path
+from typing import Any, Dict, List, Optional, Set
+
+from pip._vendor.packaging.tags import Tag, interpreter_name, interpreter_version
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.exceptions import InvalidWheelFilename
+from pip._internal.models.direct_url import DirectUrl
+from pip._internal.models.format_control import FormatControl
+from pip._internal.models.link import Link
+from pip._internal.models.wheel import Wheel
+from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
+from pip._internal.utils.urls import path_to_url
+
+logger = logging.getLogger(__name__)
+
+ORIGIN_JSON_NAME = "origin.json"
+
+
+def _hash_dict(d: Dict[str, str]) -> str:
+    """Return a stable sha224 of a dictionary."""
+    s = json.dumps(d, sort_keys=True, separators=(",", ":"), ensure_ascii=True)
+    return hashlib.sha224(s.encode("ascii")).hexdigest()
+
+
+class Cache:
+    """An abstract class - provides cache directories for data from links
+
+    :param cache_dir: The root of the cache.
+    :param format_control: An object of FormatControl class to limit
+        binaries being read from the cache.
+    :param allowed_formats: which formats of files the cache should store.
+        ('binary' and 'source' are the only allowed values)
+    """
+
+    def __init__(
+        self, cache_dir: str, format_control: FormatControl, allowed_formats: Set[str]
+    ) -> None:
+        super().__init__()
+        assert not cache_dir or os.path.isabs(cache_dir)
+        self.cache_dir = cache_dir or None
+        self.format_control = format_control
+        self.allowed_formats = allowed_formats
+
+        _valid_formats = {"source", "binary"}
+        assert self.allowed_formats.union(_valid_formats) == _valid_formats
+
+    def _get_cache_path_parts(self, link: Link) -> List[str]:
+        """Get parts of part that must be os.path.joined with cache_dir"""
+
+        # We want to generate a URL to use as our cache key; we don't want to
+        # just re-use the link's URL because it might have other items in the
+        # fragment that we don't care about.
+        key_parts = {"url": link.url_without_fragment}
+        if link.hash_name is not None and link.hash is not None:
+            key_parts[link.hash_name] = link.hash
+        if link.subdirectory_fragment:
+            key_parts["subdirectory"] = link.subdirectory_fragment
+
+        # Include interpreter name, major and minor version in cache key
+        # to cope with ill-behaved sdists that build a different wheel
+        # depending on the python version their setup.py is being run on,
+        # and don't encode the difference in compatibility tags.
+        # https://github.com/pypa/pip/issues/7296
+        key_parts["interpreter_name"] = interpreter_name()
+        key_parts["interpreter_version"] = interpreter_version()
+
+        # Hash our key dict with sha224. It has security properties similar to
+        # sha256, but a shorter output (and is thus less secure); the
+        # difference doesn't matter for our use case here.
+        hashed = _hash_dict(key_parts)
+
+        # We nest the directories a few levels deep to avoid a huge number of
+        # top-level directories, where we might hit per-directory entry limits
+        # on some filesystems.
+        parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
+
+        return parts
+
+    def _get_candidates(self, link: Link, canonical_package_name: str) -> List[Any]:
+        can_not_cache = not self.cache_dir or not canonical_package_name or not link
+        if can_not_cache:
+            return []
+
+        formats = self.format_control.get_allowed_formats(canonical_package_name)
+        if not self.allowed_formats.intersection(formats):
+            return []
+
+        candidates = []
+        path = self.get_path_for_link(link)
+        if os.path.isdir(path):
+            for candidate in os.listdir(path):
+                candidates.append((candidate, path))
+        return candidates
+
+    def get_path_for_link(self, link: Link) -> str:
+        """Return a directory to store cached items in for link."""
+        raise NotImplementedError()
+
+    def get(
+        self,
+        link: Link,
+        package_name: Optional[str],
+        supported_tags: List[Tag],
+    ) -> Link:
+        """Returns a link to a cached item if it exists, otherwise returns the
+        passed link.
+        """
+        raise NotImplementedError()
+
+
+class SimpleWheelCache(Cache):
+    """A cache of wheels for future installs."""
+
+    def __init__(self, cache_dir: str, format_control: FormatControl) -> None:
+        super().__init__(cache_dir, format_control, {"binary"})
+
+    def get_path_for_link(self, link: Link) -> str:
+        """Return a directory to store cached wheels for link
+
+        Because any one sdist can produce many wheels, we provide a directory
+        to cache them in, and then consult that directory when looking for
+        cache hits.
+
+        We only insert things into the cache if they have plausible version
+        numbers, so that we don't contaminate the cache with things that were
+        not unique. E.g. ./package might have dozens of installs done for it
+        and build a version of 0.0; if we built and cached a wheel, we'd end
+        up using the same wheel even after the source had been edited.
+
+        :param link: The link of the sdist for which this will cache wheels.
+        """
+        parts = self._get_cache_path_parts(link)
+        assert self.cache_dir
+        # Store wheels within the root cache_dir
+        return os.path.join(self.cache_dir, "wheels", *parts)
+
+    def get(
+        self,
+        link: Link,
+        package_name: Optional[str],
+        supported_tags: List[Tag],
+    ) -> Link:
+        candidates = []
+
+        if not package_name:
+            return link
+
+        canonical_package_name = canonicalize_name(package_name)
+        for wheel_name, wheel_dir in self._get_candidates(link, canonical_package_name):
+            try:
+                wheel = Wheel(wheel_name)
+            except InvalidWheelFilename:
+                continue
+            if canonicalize_name(wheel.name) != canonical_package_name:
+                logger.debug(
+                    "Ignoring cached wheel %s for %s as it "
+                    "does not match the expected distribution name %s.",
+                    wheel_name,
+                    link,
+                    package_name,
+                )
+                continue
+            if not wheel.supported(supported_tags):
+                # Built for a different python/arch/etc
+                continue
+            candidates.append(
+                (
+                    wheel.support_index_min(supported_tags),
+                    wheel_name,
+                    wheel_dir,
+                )
+            )
+
+        if not candidates:
+            return link
+
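+        # min() over (support_index_min, name, dir) tuples picks the wheel
+        # whose tag ranks earliest in supported_tags, i.e. the best match.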
+        _, wheel_name, wheel_dir = min(candidates)
+        return Link(path_to_url(os.path.join(wheel_dir, wheel_name)))
+
+
+class EphemWheelCache(SimpleWheelCache):
+    """A SimpleWheelCache that creates it's own temporary cache directory"""
+
+    def __init__(self, format_control: FormatControl) -> None:
+        self._temp_dir = TempDirectory(
+            kind=tempdir_kinds.EPHEM_WHEEL_CACHE,
+            globally_managed=True,
+        )
+
+        super().__init__(self._temp_dir.path, format_control)
+
+
+class CacheEntry:
+    def __init__(
+        self,
+        link: Link,
+        persistent: bool,
+    ):
+        self.link = link
+        self.persistent = persistent
+        self.origin: Optional[DirectUrl] = None
+        origin_direct_url_path = Path(self.link.file_path).parent / ORIGIN_JSON_NAME
+        if origin_direct_url_path.exists():
+            self.origin = DirectUrl.from_json(origin_direct_url_path.read_text())
+
+
+class WheelCache(Cache):
+    """Wraps EphemWheelCache and SimpleWheelCache into a single Cache
+
+    This Cache allows for graceful degradation: the simple wheel cache is
+    consulted first, and the ephem wheel cache is used as a fallback when a
+    link is not found there.
+    """
+
+    def __init__(
+        self, cache_dir: str, format_control: Optional[FormatControl] = None
+    ) -> None:
+        if format_control is None:
+            format_control = FormatControl()
+        super().__init__(cache_dir, format_control, {"binary"})
+        self._wheel_cache = SimpleWheelCache(cache_dir, format_control)
+        self._ephem_cache = EphemWheelCache(format_control)
+
+    def get_path_for_link(self, link: Link) -> str:
+        return self._wheel_cache.get_path_for_link(link)
+
+    def get_ephem_path_for_link(self, link: Link) -> str:
+        return self._ephem_cache.get_path_for_link(link)
+
+    def get(
+        self,
+        link: Link,
+        package_name: Optional[str],
+        supported_tags: List[Tag],
+    ) -> Link:
+        cache_entry = self.get_cache_entry(link, package_name, supported_tags)
+        if cache_entry is None:
+            return link
+        return cache_entry.link
+
+    def get_cache_entry(
+        self,
+        link: Link,
+        package_name: Optional[str],
+        supported_tags: List[Tag],
+    ) -> Optional[CacheEntry]:
+        """Returns a CacheEntry with a link to a cached item if it exists or
+        None. The cache entry indicates if the item was found in the persistent
+        or ephemeral cache.
+        """
+        retval = self._wheel_cache.get(
+            link=link,
+            package_name=package_name,
+            supported_tags=supported_tags,
+        )
+        if retval is not link:
+            return CacheEntry(retval, persistent=True)
+
+        retval = self._ephem_cache.get(
+            link=link,
+            package_name=package_name,
+            supported_tags=supported_tags,
+        )
+        if retval is not link:
+            return CacheEntry(retval, persistent=False)
+
+        return None
+
+    @staticmethod
+    def record_download_origin(cache_dir: str, download_info: DirectUrl) -> None:
+        origin_path = Path(cache_dir) / ORIGIN_JSON_NAME
+        if origin_path.is_file():
+            origin = DirectUrl.from_json(origin_path.read_text())
+            # TODO: use DirectUrl.equivalent when https://github.com/pypa/pip/pull/10564
+            # is merged.
+            if origin.url != download_info.url:
+                logger.warning(
+                    "Origin URL %s in cache entry %s does not match download URL %s. "
+                    "This is likely a pip bug or a cache corruption issue.",
+                    origin.url,
+                    cache_dir,
+                    download_info.url,
+                )
+        origin_path.write_text(download_info.to_json(), encoding="utf-8")
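
Every concrete cache above resolves a link to a directory the same way: serialize a small key dict deterministically, hash it with sha224, and fan the digest out into nested path components. A self-contained sketch of that derivation, with a hypothetical URL standing in for a real Link:

    import hashlib
    import json
    import os

    # Hypothetical sdist URL plus the interpreter fields added by
    # _get_cache_path_parts().
    key_parts = {
        "url": "https://files.example.org/demo-1.0.tar.gz",
        "interpreter_name": "cp",
        "interpreter_version": "39",
    }

    # Same stable serialization as _hash_dict(): sorted keys, compact separators.
    s = json.dumps(key_parts, sort_keys=True, separators=(",", ":"), ensure_ascii=True)
    hashed = hashlib.sha224(s.encode("ascii")).hexdigest()

    # Nest 2-character components so no single directory grows too large.
    print(os.path.join("wheels", hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]))
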
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/cli/__init__.py b/venv/lib/python3.9/site-packages/pip/_internal/cli/__init__.py
new file mode 100644
index 0000000..e589bb9
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/cli/__init__.py
@@ -0,0 +1,4 @@
+"""Subpackage containing all of pip's command line interface related code
+"""
+
+# This file intentionally does not import submodules
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/cli/autocompletion.py b/venv/lib/python3.9/site-packages/pip/_internal/cli/autocompletion.py
new file mode 100644
index 0000000..226fe84
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/cli/autocompletion.py
@@ -0,0 +1,171 @@
+"""Logic that powers autocompletion installed by ``pip completion``.
+"""
+
+import optparse
+import os
+import sys
+from itertools import chain
+from typing import Any, Iterable, List, Optional
+
+from pip._internal.cli.main_parser import create_main_parser
+from pip._internal.commands import commands_dict, create_command
+from pip._internal.metadata import get_default_environment
+
+
+def autocomplete() -> None:
+    """Entry Point for completion of main and subcommand options."""
+    # Don't complete if user hasn't sourced bash_completion file.
+    if "PIP_AUTO_COMPLETE" not in os.environ:
+        return
+    cwords = os.environ["COMP_WORDS"].split()[1:]
+    cword = int(os.environ["COMP_CWORD"])
+    try:
+        current = cwords[cword - 1]
+    except IndexError:
+        current = ""
+
+    parser = create_main_parser()
+    subcommands = list(commands_dict)
+    options = []
+
+    # subcommand
+    subcommand_name: Optional[str] = None
+    for word in cwords:
+        if word in subcommands:
+            subcommand_name = word
+            break
+    # subcommand options
+    if subcommand_name is not None:
+        # special case: 'help' subcommand has no options
+        if subcommand_name == "help":
+            sys.exit(1)
+        # special case: list locally installed dists for show and uninstall
+        should_list_installed = not current.startswith("-") and subcommand_name in [
+            "show",
+            "uninstall",
+        ]
+        if should_list_installed:
+            env = get_default_environment()
+            lc = current.lower()
+            installed = [
+                dist.canonical_name
+                for dist in env.iter_installed_distributions(local_only=True)
+                if dist.canonical_name.startswith(lc)
+                and dist.canonical_name not in cwords[1:]
+            ]
+            # if there are no dists installed, fall back to option completion
+            if installed:
+                for dist in installed:
+                    print(dist)
+                sys.exit(1)
+
+        should_list_installables = (
+            not current.startswith("-") and subcommand_name == "install"
+        )
+        if should_list_installables:
+            for path in auto_complete_paths(current, "path"):
+                print(path)
+            sys.exit(1)
+
+        subcommand = create_command(subcommand_name)
+
+        for opt in subcommand.parser.option_list_all:
+            if opt.help != optparse.SUPPRESS_HELP:
+                for opt_str in opt._long_opts + opt._short_opts:
+                    options.append((opt_str, opt.nargs))
+
+        # filter out previously specified options from available options
+        prev_opts = [x.split("=")[0] for x in cwords[1 : cword - 1]]
+        options = [(x, v) for (x, v) in options if x not in prev_opts]
+        # filter options by current input
+        options = [(k, v) for k, v in options if k.startswith(current)]
+        # get completion type given cwords and available subcommand options
+        completion_type = get_path_completion_type(
+            cwords,
+            cword,
+            subcommand.parser.option_list_all,
+        )
+        # get completion files and directories if ``completion_type`` is
+        # ``<file>``, ``<dir>`` or ``<path>``
+        if completion_type:
+            paths = auto_complete_paths(current, completion_type)
+            options = [(path, 0) for path in paths]
+        for option in options:
+            opt_label = option[0]
+            # append '=' to options which require args
+            if option[1] and option[0][:2] == "--":
+                opt_label += "="
+            print(opt_label)
+    else:
+        # show main parser options only when necessary
+
+        opts = [i.option_list for i in parser.option_groups]
+        opts.append(parser.option_list)
+        flattened_opts = chain.from_iterable(opts)
+        if current.startswith("-"):
+            for opt in flattened_opts:
+                if opt.help != optparse.SUPPRESS_HELP:
+                    subcommands += opt._long_opts + opt._short_opts
+        else:
+            # get completion type given cwords and all available options
+            completion_type = get_path_completion_type(cwords, cword, flattened_opts)
+            if completion_type:
+                subcommands = list(auto_complete_paths(current, completion_type))
+
+        print(" ".join([x for x in subcommands if x.startswith(current)]))
+    sys.exit(1)
+
+
+def get_path_completion_type(
+    cwords: List[str], cword: int, opts: Iterable[Any]
+) -> Optional[str]:
+    """Get the type of path completion (``file``, ``dir``, ``path`` or None)
+
+    :param cwords: same as the environment variable ``COMP_WORDS``
+    :param cword: same as the environment variable ``COMP_CWORD``
+    :param opts: The available options to check
+    :return: path completion type (``file``, ``dir``, ``path`` or None)
+    """
+    if cword < 2 or not cwords[cword - 2].startswith("-"):
+        return None
+    for opt in opts:
+        if opt.help == optparse.SUPPRESS_HELP:
+            continue
+        for o in str(opt).split("/"):
+            if cwords[cword - 2].split("=")[0] == o:
+                if not opt.metavar or any(
+                    x in ("path", "file", "dir") for x in opt.metavar.split("/")
+                ):
+                    return opt.metavar
+    return None
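+
+# Traced example: completing "pip install -r <TAB>" yields
+# cwords == ["install", "-r"] and cword == 3; cwords[cword - 2] is "-r",
+# whose metavar is "file", so the completion type is ``file``.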
+
+
+def auto_complete_paths(current: str, completion_type: str) -> Iterable[str]:
+    """If ``completion_type`` is ``file`` or ``path``, list all regular files
+    and directories starting with ``current``; otherwise only list directories
+    starting with ``current``.
+
+    :param current: The word to be completed
+    :param completion_type: path completion type (``file``, ``path`` or ``dir``)
+    :return: A generator of regular files and/or directories
+    """
+    directory, filename = os.path.split(current)
+    current_path = os.path.abspath(directory)
+    # Don't complete paths if they can't be accessed
+    if not os.access(current_path, os.R_OK):
+        return
+    filename = os.path.normcase(filename)
+    # list all files that start with ``filename``
+    file_list = (
+        x for x in os.listdir(current_path) if os.path.normcase(x).startswith(filename)
+    )
+    for f in file_list:
+        opt = os.path.join(current_path, f)
+        comp_file = os.path.normcase(os.path.join(directory, f))
+        # complete regular files when there is no ``<dir>`` after the option;
+        # complete directories when there is ``<file>``, ``<path>`` or
+        # ``<dir>`` after the option
+        if completion_type != "dir" and os.path.isfile(opt):
+            yield comp_file
+        elif os.path.isdir(opt):
+            yield os.path.join(comp_file, "")
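
autocomplete() above is driven entirely by environment variables exported by the shell script that pip completion installs. One way to exercise it by hand (assuming a pip executable on PATH; the non-zero exit status is expected, since autocomplete() always ends in sys.exit(1)):

    import os
    import subprocess

    env = dict(
        os.environ,
        PIP_AUTO_COMPLETE="1",  # the gate checked at the top of autocomplete()
        COMP_WORDS="pip ins",   # the partial command line
        COMP_CWORD="1",         # index of the word being completed
    )
    result = subprocess.run(["pip"], env=env, capture_output=True, text=True)
    print(result.stdout)  # candidate subcommands, e.g. "install"
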
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/cli/base_command.py b/venv/lib/python3.9/site-packages/pip/_internal/cli/base_command.py
new file mode 100644
index 0000000..5bd7e67
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/cli/base_command.py
@@ -0,0 +1,216 @@
+"""Base Command class, and related routines"""
+
+import functools
+import logging
+import logging.config
+import optparse
+import os
+import sys
+import traceback
+from optparse import Values
+from typing import Any, Callable, List, Optional, Tuple
+
+from pip._vendor.rich import traceback as rich_traceback
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.command_context import CommandContextMixIn
+from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
+from pip._internal.cli.status_codes import (
+    ERROR,
+    PREVIOUS_BUILD_DIR_ERROR,
+    UNKNOWN_ERROR,
+    VIRTUALENV_NOT_FOUND,
+)
+from pip._internal.exceptions import (
+    BadCommand,
+    CommandError,
+    DiagnosticPipError,
+    InstallationError,
+    NetworkConnectionError,
+    PreviousBuildDirError,
+    UninstallationError,
+)
+from pip._internal.utils.filesystem import check_path_owner
+from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging
+from pip._internal.utils.misc import get_prog, normalize_path
+from pip._internal.utils.temp_dir import TempDirectoryTypeRegistry as TempDirRegistry
+from pip._internal.utils.temp_dir import global_tempdir_manager, tempdir_registry
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+__all__ = ["Command"]
+
+logger = logging.getLogger(__name__)
+
+
+class Command(CommandContextMixIn):
+    usage: str = ""
+    ignore_require_venv: bool = False
+
+    def __init__(self, name: str, summary: str, isolated: bool = False) -> None:
+        super().__init__()
+
+        self.name = name
+        self.summary = summary
+        self.parser = ConfigOptionParser(
+            usage=self.usage,
+            prog=f"{get_prog()} {name}",
+            formatter=UpdatingDefaultsHelpFormatter(),
+            add_help_option=False,
+            name=name,
+            description=self.__doc__,
+            isolated=isolated,
+        )
+
+        self.tempdir_registry: Optional[TempDirRegistry] = None
+
+        # Commands should add options to this option group
+        optgroup_name = f"{self.name.capitalize()} Options"
+        self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)
+
+        # Add the general options
+        gen_opts = cmdoptions.make_option_group(
+            cmdoptions.general_group,
+            self.parser,
+        )
+        self.parser.add_option_group(gen_opts)
+
+        self.add_options()
+
+    def add_options(self) -> None:
+        pass
+
+    def handle_pip_version_check(self, options: Values) -> None:
+        """
+        This is a no-op so that commands by default do not do the pip version
+        check.
+        """
+        # Make sure we do the pip version check if the index_group options
+        # are present.
+        assert not hasattr(options, "no_index")
+
+    def run(self, options: Values, args: List[str]) -> int:
+        raise NotImplementedError
+
+    def parse_args(self, args: List[str]) -> Tuple[Values, List[str]]:
+        # factored out for testability
+        return self.parser.parse_args(args)
+
+    def main(self, args: List[str]) -> int:
+        try:
+            with self.main_context():
+                return self._main(args)
+        finally:
+            logging.shutdown()
+
+    def _main(self, args: List[str]) -> int:
+        # We must initialize this before the tempdir manager, otherwise the
+        # configuration would not be accessible by the time we clean up the
+        # tempdir manager.
+        self.tempdir_registry = self.enter_context(tempdir_registry())
+        # Intentionally set as early as possible so globally-managed temporary
+        # directories are available to the rest of the code.
+        self.enter_context(global_tempdir_manager())
+
+        options, args = self.parse_args(args)
+
+        # Set verbosity so that it can be used elsewhere.
+        self.verbosity = options.verbose - options.quiet
+
+        level_number = setup_logging(
+            verbosity=self.verbosity,
+            no_color=options.no_color,
+            user_log_file=options.log,
+        )
+
+        # TODO: Try to get these passed down from the command,
+        #       without resorting to os.environ to hold them.
+        #       This also affects isolated builds, and it should.
+
+        if options.no_input:
+            os.environ["PIP_NO_INPUT"] = "1"
+
+        if options.exists_action:
+            os.environ["PIP_EXISTS_ACTION"] = " ".join(options.exists_action)
+
+        if options.require_venv and not self.ignore_require_venv:
+            # If a venv is required check if it can really be found
+            if not running_under_virtualenv():
+                logger.critical("Could not find an activated virtualenv (required).")
+                sys.exit(VIRTUALENV_NOT_FOUND)
+
+        if options.cache_dir:
+            options.cache_dir = normalize_path(options.cache_dir)
+            if not check_path_owner(options.cache_dir):
+                logger.warning(
+                    "The directory '%s' or its parent directory is not owned "
+                    "or is not writable by the current user. The cache "
+                    "has been disabled. Check the permissions and owner of "
+                    "that directory. If executing pip with sudo, you should "
+                    "use sudo's -H flag.",
+                    options.cache_dir,
+                )
+                options.cache_dir = None
+
+        def intercepts_unhandled_exc(
+            run_func: Callable[..., int]
+        ) -> Callable[..., int]:
+            @functools.wraps(run_func)
+            def exc_logging_wrapper(*args: Any) -> int:
+                try:
+                    status = run_func(*args)
+                    assert isinstance(status, int)
+                    return status
+                except DiagnosticPipError as exc:
+                    logger.error("[present-rich] %s", exc)
+                    logger.debug("Exception information:", exc_info=True)
+
+                    return ERROR
+                except PreviousBuildDirError as exc:
+                    logger.critical(str(exc))
+                    logger.debug("Exception information:", exc_info=True)
+
+                    return PREVIOUS_BUILD_DIR_ERROR
+                except (
+                    InstallationError,
+                    UninstallationError,
+                    BadCommand,
+                    NetworkConnectionError,
+                ) as exc:
+                    logger.critical(str(exc))
+                    logger.debug("Exception information:", exc_info=True)
+
+                    return ERROR
+                except CommandError as exc:
+                    logger.critical("%s", exc)
+                    logger.debug("Exception information:", exc_info=True)
+
+                    return ERROR
+                except BrokenStdoutLoggingError:
+                    # Bypass our logger and write any remaining messages to
+                    # stderr because stdout no longer works.
+                    print("ERROR: Pipe to stdout was broken", file=sys.stderr)
+                    if level_number <= logging.DEBUG:
+                        traceback.print_exc(file=sys.stderr)
+
+                    return ERROR
+                except KeyboardInterrupt:
+                    logger.critical("Operation cancelled by user")
+                    logger.debug("Exception information:", exc_info=True)
+
+                    return ERROR
+                except BaseException:
+                    logger.critical("Exception:", exc_info=True)
+
+                    return UNKNOWN_ERROR
+
+            return exc_logging_wrapper
+
+        try:
+            if not options.debug_mode:
+                run = intercepts_unhandled_exc(self.run)
+            else:
+                run = self.run
+                rich_traceback.install(show_locals=True)
+            return run(options, args)
+        finally:
+            self.handle_pip_version_check(options)
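
Command above is the template every pip subcommand fills in: add_options() populates cmd_opts, run() does the work, and main() wraps it in logging setup, tempdir management, and the exception-to-exit-code mapping. A minimal hypothetical subclass, sketched against these internals (which are not a stable public API):

    from optparse import Values
    from typing import List

    from pip._internal.cli.base_command import Command
    from pip._internal.cli.status_codes import SUCCESS

    class HelloCommand(Command):
        """Print a greeting."""

        usage = "%prog [options]"
        ignore_require_venv = True  # skip the --require-virtualenv check

        def add_options(self) -> None:
            self.cmd_opts.add_option("--name", dest="name", default="world")
            self.parser.add_option_group(self.cmd_opts)

        def run(self, options: Values, args: List[str]) -> int:
            print(f"hello, {options.name}")
            return SUCCESS  # passed through by exc_logging_wrapper

    # HelloCommand("hello", "Print a greeting.").main(["--name", "pip"])
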
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/cli/cmdoptions.py b/venv/lib/python3.9/site-packages/pip/_internal/cli/cmdoptions.py
new file mode 100644
index 0000000..b4e2560
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/cli/cmdoptions.py
@@ -0,0 +1,1049 @@
+"""
+shared options and groups
+
+The principle here is to define options once, but *not* instantiate them
+globally. One reason being that options with action='append' can carry state
+between parses. pip parses general options twice internally, and shouldn't
+pass on state. To be consistent, all options will follow this design.
+"""
+
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+import importlib.util
+import logging
+import os
+import textwrap
+from functools import partial
+from optparse import SUPPRESS_HELP, Option, OptionGroup, OptionParser, Values
+from textwrap import dedent
+from typing import Any, Callable, Dict, Optional, Tuple
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.cli.parser import ConfigOptionParser
+from pip._internal.exceptions import CommandError
+from pip._internal.locations import USER_CACHE_DIR, get_src_prefix
+from pip._internal.models.format_control import FormatControl
+from pip._internal.models.index import PyPI
+from pip._internal.models.target_python import TargetPython
+from pip._internal.utils.hashes import STRONG_HASHES
+from pip._internal.utils.misc import strtobool
+
+logger = logging.getLogger(__name__)
+
+
+def raise_option_error(parser: OptionParser, option: Option, msg: str) -> None:
+    """
+    Raise an option parsing error using parser.error().
+
+    Args:
+      parser: an OptionParser instance.
+      option: an Option instance.
+      msg: the error text.
+    """
+    msg = f"{option} error: {msg}"
+    msg = textwrap.fill(" ".join(msg.split()))
+    parser.error(msg)
+
+
+def make_option_group(group: Dict[str, Any], parser: ConfigOptionParser) -> OptionGroup:
+    """
+    Return an OptionGroup object
+    group  -- assumed to be dict with 'name' and 'options' keys
+    parser -- an optparse Parser
+    """
+    option_group = OptionGroup(parser, group["name"])
+    for option in group["options"]:
+        option_group.add_option(option())
+    return option_group
+
+
+def check_dist_restriction(options: Values, check_target: bool = False) -> None:
+    """Function for determining if custom platform options are allowed.
+
+    :param options: The OptionParser options.
+    :param check_target: Whether or not to check if --target is being used.
+    """
+    dist_restriction_set = any(
+        [
+            options.python_version,
+            options.platforms,
+            options.abis,
+            options.implementation,
+        ]
+    )
+
+    binary_only = FormatControl(set(), {":all:"})
+    sdist_dependencies_allowed = (
+        options.format_control != binary_only and not options.ignore_dependencies
+    )
+
+    # Installations or downloads using dist restrictions must not combine
+    # source distributions and dist-specific wheels, as they are not
+    # guaranteed to be locally compatible.
+    if dist_restriction_set and sdist_dependencies_allowed:
+        raise CommandError(
+            "When restricting platform and interpreter constraints using "
+            "--python-version, --platform, --abi, or --implementation, "
+            "either --no-deps must be set, or --only-binary=:all: must be "
+            "set and --no-binary must not be set (or must be set to "
+            ":none:)."
+        )
+
+    if check_target:
+        if dist_restriction_set and not options.target_dir:
+            raise CommandError(
+                "Can not use any platform or abi specific options unless "
+                "installing via '--target'"
+            )
+
+
+def _path_option_check(option: Option, opt: str, value: str) -> str:
+    return os.path.expanduser(value)
+
+
+def _package_name_option_check(option: Option, opt: str, value: str) -> str:
+    return canonicalize_name(value)
+
+
+class PipOption(Option):
+    TYPES = Option.TYPES + ("path", "package_name")
+    TYPE_CHECKER = Option.TYPE_CHECKER.copy()
+    TYPE_CHECKER["package_name"] = _package_name_option_check
+    TYPE_CHECKER["path"] = _path_option_check
+
+
+###########
+# options #
+###########
+
+help_: Callable[..., Option] = partial(
+    Option,
+    "-h",
+    "--help",
+    dest="help",
+    action="help",
+    help="Show help.",
+)
+
+debug_mode: Callable[..., Option] = partial(
+    Option,
+    "--debug",
+    dest="debug_mode",
+    action="store_true",
+    default=False,
+    help=(
+        "Let unhandled exceptions propagate outside the main subroutine, "
+        "instead of logging them to stderr."
+    ),
+)
+
+isolated_mode: Callable[..., Option] = partial(
+    Option,
+    "--isolated",
+    dest="isolated_mode",
+    action="store_true",
+    default=False,
+    help=(
+        "Run pip in an isolated mode, ignoring environment variables and user "
+        "configuration."
+    ),
+)
+
+require_virtualenv: Callable[..., Option] = partial(
+    Option,
+    "--require-virtualenv",
+    "--require-venv",
+    dest="require_venv",
+    action="store_true",
+    default=False,
+    help=(
+        "Allow pip to only run in a virtual environment; "
+        "exit with an error otherwise."
+    ),
+)
+
+python: Callable[..., Option] = partial(
+    Option,
+    "--python",
+    dest="python",
+    help="Run pip with the specified Python interpreter.",
+)
+
+verbose: Callable[..., Option] = partial(
+    Option,
+    "-v",
+    "--verbose",
+    dest="verbose",
+    action="count",
+    default=0,
+    help="Give more output. Option is additive, and can be used up to 3 times.",
+)
+
+no_color: Callable[..., Option] = partial(
+    Option,
+    "--no-color",
+    dest="no_color",
+    action="store_true",
+    default=False,
+    help="Suppress colored output.",
+)
+
+version: Callable[..., Option] = partial(
+    Option,
+    "-V",
+    "--version",
+    dest="version",
+    action="store_true",
+    help="Show version and exit.",
+)
+
+quiet: Callable[..., Option] = partial(
+    Option,
+    "-q",
+    "--quiet",
+    dest="quiet",
+    action="count",
+    default=0,
+    help=(
+        "Give less output. Option is additive, and can be used up to 3"
+        " times (corresponding to WARNING, ERROR, and CRITICAL logging"
+        " levels)."
+    ),
+)
+
+progress_bar: Callable[..., Option] = partial(
+    Option,
+    "--progress-bar",
+    dest="progress_bar",
+    type="choice",
+    choices=["on", "off"],
+    default="on",
+    help="Specify whether the progress bar should be used [on, off] (default: on)",
+)
+
+log: Callable[..., Option] = partial(
+    PipOption,
+    "--log",
+    "--log-file",
+    "--local-log",
+    dest="log",
+    metavar="path",
+    type="path",
+    help="Path to a verbose appending log.",
+)
+
+no_input: Callable[..., Option] = partial(
+    Option,
+    # Don't ask for input
+    "--no-input",
+    dest="no_input",
+    action="store_true",
+    default=False,
+    help="Disable prompting for input.",
+)
+
+proxy: Callable[..., Option] = partial(
+    Option,
+    "--proxy",
+    dest="proxy",
+    type="str",
+    default="",
+    help="Specify a proxy in the form scheme://[user:passwd@]proxy.server:port.",
+)
+
+retries: Callable[..., Option] = partial(
+    Option,
+    "--retries",
+    dest="retries",
+    type="int",
+    default=5,
+    help="Maximum number of retries each connection should attempt "
+    "(default %default times).",
+)
+
+timeout: Callable[..., Option] = partial(
+    Option,
+    "--timeout",
+    "--default-timeout",
+    metavar="sec",
+    dest="timeout",
+    type="float",
+    default=15,
+    help="Set the socket timeout (default %default seconds).",
+)
+
+
+def exists_action() -> Option:
+    return Option(
+        # Option when a path already exists
+        "--exists-action",
+        dest="exists_action",
+        type="choice",
+        choices=["s", "i", "w", "b", "a"],
+        default=[],
+        action="append",
+        metavar="action",
+        help="Default action when a path already exists: "
+        "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.",
+    )
+
+
+cert: Callable[..., Option] = partial(
+    PipOption,
+    "--cert",
+    dest="cert",
+    type="path",
+    metavar="path",
+    help=(
+        "Path to PEM-encoded CA certificate bundle. "
+        "If provided, overrides the default. "
+        "See 'SSL Certificate Verification' in pip documentation "
+        "for more information."
+    ),
+)
+
+client_cert: Callable[..., Option] = partial(
+    PipOption,
+    "--client-cert",
+    dest="client_cert",
+    type="path",
+    default=None,
+    metavar="path",
+    help="Path to SSL client certificate, a single file containing the "
+    "private key and the certificate in PEM format.",
+)
+
+index_url: Callable[..., Option] = partial(
+    Option,
+    "-i",
+    "--index-url",
+    "--pypi-url",
+    dest="index_url",
+    metavar="URL",
+    default=PyPI.simple_url,
+    help="Base URL of the Python Package Index (default %default). "
+    "This should point to a repository compliant with PEP 503 "
+    "(the simple repository API) or a local directory laid out "
+    "in the same format.",
+)
+
+
+def extra_index_url() -> Option:
+    return Option(
+        "--extra-index-url",
+        dest="extra_index_urls",
+        metavar="URL",
+        action="append",
+        default=[],
+        help="Extra URLs of package indexes to use in addition to "
+        "--index-url. Should follow the same rules as "
+        "--index-url.",
+    )
+
+
+no_index: Callable[..., Option] = partial(
+    Option,
+    "--no-index",
+    dest="no_index",
+    action="store_true",
+    default=False,
+    help="Ignore package index (only looking at --find-links URLs instead).",
+)
+
+
+def find_links() -> Option:
+    return Option(
+        "-f",
+        "--find-links",
+        dest="find_links",
+        action="append",
+        default=[],
+        metavar="url",
+        help="If a URL or path to an html file, then parse for links to "
+        "archives such as sdist (.tar.gz) or wheel (.whl) files. "
+        "If a local path or file:// URL that's a directory, "
+        "then look for archives in the directory listing. "
+        "Links to VCS project URLs are not supported.",
+    )
+
+
+def trusted_host() -> Option:
+    return Option(
+        "--trusted-host",
+        dest="trusted_hosts",
+        action="append",
+        metavar="HOSTNAME",
+        default=[],
+        help="Mark this host or host:port pair as trusted, even though it "
+        "does not have valid or any HTTPS.",
+    )
+
+
+def constraints() -> Option:
+    return Option(
+        "-c",
+        "--constraint",
+        dest="constraints",
+        action="append",
+        default=[],
+        metavar="file",
+        help="Constrain versions using the given constraints file. "
+        "This option can be used multiple times.",
+    )
+
+
+def requirements() -> Option:
+    return Option(
+        "-r",
+        "--requirement",
+        dest="requirements",
+        action="append",
+        default=[],
+        metavar="file",
+        help="Install from the given requirements file. "
+        "This option can be used multiple times.",
+    )
+
+
+def editable() -> Option:
+    return Option(
+        "-e",
+        "--editable",
+        dest="editables",
+        action="append",
+        default=[],
+        metavar="path/url",
+        help=(
+            "Install a project in editable mode (i.e. setuptools "
+            '"develop mode") from a local project path or a VCS url.'
+        ),
+    )
+
+
+def _handle_src(option: Option, opt_str: str, value: str, parser: OptionParser) -> None:
+    value = os.path.abspath(value)
+    setattr(parser.values, option.dest, value)
+
+
+src: Callable[..., Option] = partial(
+    PipOption,
+    "--src",
+    "--source",
+    "--source-dir",
+    "--source-directory",
+    dest="src_dir",
+    type="path",
+    metavar="dir",
+    default=get_src_prefix(),
+    action="callback",
+    callback=_handle_src,
+    help="Directory to check out editable projects into. "
+    'The default in a virtualenv is "<venv path>/src". '
+    'The default for global installs is "<current dir>/src".',
+)
+
+
+def _get_format_control(values: Values, option: Option) -> Any:
+    """Get a format_control object."""
+    return getattr(values, option.dest)
+
+
+def _handle_no_binary(
+    option: Option, opt_str: str, value: str, parser: OptionParser
+) -> None:
+    existing = _get_format_control(parser.values, option)
+    FormatControl.handle_mutual_excludes(
+        value,
+        existing.no_binary,
+        existing.only_binary,
+    )
+
+
+def _handle_only_binary(
+    option: Option, opt_str: str, value: str, parser: OptionParser
+) -> None:
+    existing = _get_format_control(parser.values, option)
+    FormatControl.handle_mutual_excludes(
+        value,
+        existing.only_binary,
+        existing.no_binary,
+    )
+
+
+def no_binary() -> Option:
+    format_control = FormatControl(set(), set())
+    return Option(
+        "--no-binary",
+        dest="format_control",
+        action="callback",
+        callback=_handle_no_binary,
+        type="str",
+        default=format_control,
+        help="Do not use binary packages. Can be supplied multiple times, and "
+        'each time adds to the existing value. Accepts either ":all:" to '
+        'disable all binary packages, ":none:" to empty the set (notice '
+        "the colons), or one or more package names with commas between "
+        "them (no colons). Note that some packages are tricky to compile "
+        "and may fail to install when this option is used on them.",
+    )
+
+
+def only_binary() -> Option:
+    format_control = FormatControl(set(), set())
+    return Option(
+        "--only-binary",
+        dest="format_control",
+        action="callback",
+        callback=_handle_only_binary,
+        type="str",
+        default=format_control,
+        help="Do not use source packages. Can be supplied multiple times, and "
+        'each time adds to the existing value. Accepts either ":all:" to '
+        'disable all source packages, ":none:" to empty the set, or one '
+        "or more package names with commas between them. Packages "
+        "without binary distributions will fail to install when this "
+        "option is used on them.",
+    )
+
+
+platforms: Callable[..., Option] = partial(
+    Option,
+    "--platform",
+    dest="platforms",
+    metavar="platform",
+    action="append",
+    default=None,
+    help=(
+        "Only use wheels compatible with <platform>. Defaults to the "
+        "platform of the running system. Use this option multiple times to "
+        "specify multiple platforms supported by the target interpreter."
+    ),
+)
+
+
+# This was made a separate function for unit-testing purposes.
+def _convert_python_version(value: str) -> Tuple[Tuple[int, ...], Optional[str]]:
+    """
+    Convert a version string like "3", "37", or "3.7.3" into a tuple of ints.
+
+    :return: A 2-tuple (version_info, error_msg), where `error_msg` is
+        non-None if and only if there was a parsing error.
+    """
+    if not value:
+        # The empty string is the same as not providing a value.
+        return (None, None)
+
+    parts = value.split(".")
+    if len(parts) > 3:
+        return ((), "at most three version parts are allowed")
+
+    if len(parts) == 1:
+        # Then we are in the case of "3" or "37".
+        value = parts[0]
+        if len(value) > 1:
+            parts = [value[0], value[1:]]
+
+    try:
+        version_info = tuple(int(part) for part in parts)
+    except ValueError:
+        return ((), "each version part must be an integer")
+
+    return (version_info, None)
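+
+# A few traced examples of the conversion above (illustrative):
+#   _convert_python_version("3")      -> ((3,), None)
+#   _convert_python_version("37")     -> ((3, 7), None)
+#   _convert_python_version("3.7.3")  -> ((3, 7, 3), None)
+#   _convert_python_version("")       -> (None, None)
+#   _convert_python_version("3.x")    -> ((), "each version part must be an integer")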
+
+
+def _handle_python_version(
+    option: Option, opt_str: str, value: str, parser: OptionParser
+) -> None:
+    """
+    Handle a provided --python-version value.
+    """
+    version_info, error_msg = _convert_python_version(value)
+    if error_msg is not None:
+        msg = "invalid --python-version value: {!r}: {}".format(
+            value,
+            error_msg,
+        )
+        raise_option_error(parser, option=option, msg=msg)
+
+    parser.values.python_version = version_info
+
+
+python_version: Callable[..., Option] = partial(
+    Option,
+    "--python-version",
+    dest="python_version",
+    metavar="python_version",
+    action="callback",
+    callback=_handle_python_version,
+    type="str",
+    default=None,
+    help=dedent(
+        """\
+    The Python interpreter version to use for wheel and "Requires-Python"
+    compatibility checks. Defaults to a version derived from the running
+    interpreter. The version can be specified using up to three dot-separated
+    integers (e.g. "3" for 3.0.0, "3.7" for 3.7.0, or "3.7.3"). A major-minor
+    version can also be given as a string without dots (e.g. "37" for 3.7.0).
+    """
+    ),
+)
+
+
+implementation: Callable[..., Option] = partial(
+    Option,
+    "--implementation",
+    dest="implementation",
+    metavar="implementation",
+    default=None,
+    help=(
+        "Only use wheels compatible with Python "
+        "implementation <implementation>, e.g. 'pp', 'jy', 'cp', "
+        " or 'ip'. If not specified, then the current "
+        "interpreter implementation is used.  Use 'py' to force "
+        "implementation-agnostic wheels."
+    ),
+)
+
+
+abis: Callable[..., Option] = partial(
+    Option,
+    "--abi",
+    dest="abis",
+    metavar="abi",
+    action="append",
+    default=None,
+    help=(
+        "Only use wheels compatible with Python abi <abi>, e.g. 'pypy_41'. "
+        "If not specified, then the current interpreter abi tag is used. "
+        "Use this option multiple times to specify multiple abis supported "
+        "by the target interpreter. Generally you will need to specify "
+        "--implementation, --platform, and --python-version when using this "
+        "option."
+    ),
+)
+
+
+def add_target_python_options(cmd_opts: OptionGroup) -> None:
+    cmd_opts.add_option(platforms())
+    cmd_opts.add_option(python_version())
+    cmd_opts.add_option(implementation())
+    cmd_opts.add_option(abis())
+
+
+def make_target_python(options: Values) -> TargetPython:
+    target_python = TargetPython(
+        platforms=options.platforms,
+        py_version_info=options.python_version,
+        abis=options.abis,
+        implementation=options.implementation,
+    )
+
+    return target_python
+
+
+def prefer_binary() -> Option:
+    return Option(
+        "--prefer-binary",
+        dest="prefer_binary",
+        action="store_true",
+        default=False,
+        help="Prefer older binary packages over newer source packages.",
+    )
+
+
+cache_dir: Callable[..., Option] = partial(
+    PipOption,
+    "--cache-dir",
+    dest="cache_dir",
+    default=USER_CACHE_DIR,
+    metavar="dir",
+    type="path",
+    help="Store the cache data in <dir>.",
+)
+
+
+def _handle_no_cache_dir(
+    option: Option, opt: str, value: str, parser: OptionParser
+) -> None:
+    """
+    Process a value provided for the --no-cache-dir option.
+
+    This is an optparse.Option callback for the --no-cache-dir option.
+    """
+    # The value argument will be None if --no-cache-dir is passed via the
+    # command-line, since the option doesn't accept arguments.  However,
+    # the value can be non-None if the option is triggered e.g. by an
+    # environment variable, like PIP_NO_CACHE_DIR=true.
+    if value is not None:
+        # Then parse the string value to get argument error-checking.
+        try:
+            strtobool(value)
+        except ValueError as exc:
+            raise_option_error(parser, option=option, msg=str(exc))
+
+    # Originally, setting PIP_NO_CACHE_DIR to a value that strtobool()
+    # converted to 0 (like "false" or "no") caused cache_dir to be disabled
+    # rather than enabled (as logic would suggest). Thus, we disable
+    # the cache directory not just on values that parse to True, but (for
+    # backwards compatibility reasons) also on values that parse to False.
+    # In other words, always set it to False if the option is provided in
+    # some (valid) form.
+    parser.values.cache_dir = False
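+
+    # Net effect (illustrative): PIP_NO_CACHE_DIR=false disables the cache
+    # just as PIP_NO_CACHE_DIR=true does; only an unparsable value such as
+    # PIP_NO_CACHE_DIR=maybe is rejected via raise_option_error().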
+
+
+no_cache: Callable[..., Option] = partial(
+    Option,
+    "--no-cache-dir",
+    dest="cache_dir",
+    action="callback",
+    callback=_handle_no_cache_dir,
+    help="Disable the cache.",
+)
+
+no_deps: Callable[..., Option] = partial(
+    Option,
+    "--no-deps",
+    "--no-dependencies",
+    dest="ignore_dependencies",
+    action="store_true",
+    default=False,
+    help="Don't install package dependencies.",
+)
+
+ignore_requires_python: Callable[..., Option] = partial(
+    Option,
+    "--ignore-requires-python",
+    dest="ignore_requires_python",
+    action="store_true",
+    help="Ignore the Requires-Python information.",
+)
+
+no_build_isolation: Callable[..., Option] = partial(
+    Option,
+    "--no-build-isolation",
+    dest="build_isolation",
+    action="store_false",
+    default=True,
+    help="Disable isolation when building a modern source distribution. "
+    "Build dependencies specified by PEP 518 must be already installed "
+    "if this option is used.",
+)
+
+check_build_deps: Callable[..., Option] = partial(
+    Option,
+    "--check-build-dependencies",
+    dest="check_build_deps",
+    action="store_true",
+    default=False,
+    help="Check the build dependencies when PEP517 is used.",
+)
+
+
+def _handle_no_use_pep517(
+    option: Option, opt: str, value: str, parser: OptionParser
+) -> None:
+    """
+    Process a value provided for the --no-use-pep517 option.
+
+    This is an optparse.Option callback for the no_use_pep517 option.
+    """
+    # Since --no-use-pep517 doesn't accept arguments, the value argument
+    # will be None if --no-use-pep517 is passed via the command-line.
+    # However, the value can be non-None if the option is triggered e.g.
+    # by an environment variable, for example "PIP_NO_USE_PEP517=true".
+    if value is not None:
+        msg = """A value was passed for --no-use-pep517,
+        probably using either the PIP_NO_USE_PEP517 environment variable
+        or the "no-use-pep517" config file option. Use an appropriate value
+        of the PIP_USE_PEP517 environment variable or the "use-pep517"
+        config file option instead.
+        """
+        raise_option_error(parser, option=option, msg=msg)
+
+    # If user doesn't wish to use pep517, we check if setuptools is installed
+    # and raise error if it is not.
+    if not importlib.util.find_spec("setuptools"):
+        msg = "It is not possible to use --no-use-pep517 without setuptools installed."
+        raise_option_error(parser, option=option, msg=msg)
+
+    # Otherwise, --no-use-pep517 was passed via the command-line.
+    parser.values.use_pep517 = False
+
+
+use_pep517: Any = partial(
+    Option,
+    "--use-pep517",
+    dest="use_pep517",
+    action="store_true",
+    default=None,
+    help="Use PEP 517 for building source distributions "
+    "(use --no-use-pep517 to force legacy behaviour).",
+)
+
+no_use_pep517: Any = partial(
+    Option,
+    "--no-use-pep517",
+    dest="use_pep517",
+    action="callback",
+    callback=_handle_no_use_pep517,
+    default=None,
+    help=SUPPRESS_HELP,
+)
+
+
+def _handle_config_settings(
+    option: Option, opt_str: str, value: str, parser: OptionParser
+) -> None:
+    key, sep, val = value.partition("=")
+    if sep != "=":
+        parser.error(f"Arguments to {opt_str} must be of the form KEY=VAL")  # noqa
+    dest = getattr(parser.values, option.dest)
+    if dest is None:
+        dest = {}
+        setattr(parser.values, option.dest, dest)
+    dest[key] = val
+
+
+config_settings: Callable[..., Option] = partial(
+    Option,
+    "--config-settings",
+    dest="config_settings",
+    type=str,
+    action="callback",
+    callback=_handle_config_settings,
+    metavar="settings",
+    help="Configuration settings to be passed to the PEP 517 build backend. "
+    "Settings take the form KEY=VALUE. Use multiple --config-settings options "
+    "to pass multiple keys to the backend.",
+)
+
+install_options: Callable[..., Option] = partial(
+    Option,
+    "--install-option",
+    dest="install_options",
+    action="append",
+    metavar="options",
+    help="Extra arguments to be supplied to the setup.py install "
+    'command (use like --install-option="--install-scripts=/usr/local/'
+    'bin"). Use multiple --install-option options to pass multiple '
+    "options to setup.py install. If you are using an option with a "
+    "directory path, be sure to use absolute path.",
+)
+
+build_options: Callable[..., Option] = partial(
+    Option,
+    "--build-option",
+    dest="build_options",
+    metavar="options",
+    action="append",
+    help="Extra arguments to be supplied to 'setup.py bdist_wheel'.",
+)
+
+global_options: Callable[..., Option] = partial(
+    Option,
+    "--global-option",
+    dest="global_options",
+    action="append",
+    metavar="options",
+    help="Extra global options to be supplied to the setup.py "
+    "call before the install or bdist_wheel command.",
+)
+
+no_clean: Callable[..., Option] = partial(
+    Option,
+    "--no-clean",
+    action="store_true",
+    default=False,
+    help="Don't clean up build directories.",
+)
+
+pre: Callable[..., Option] = partial(
+    Option,
+    "--pre",
+    action="store_true",
+    default=False,
+    help="Include pre-release and development versions. By default, "
+    "pip only finds stable versions.",
+)
+
+disable_pip_version_check: Callable[..., Option] = partial(
+    Option,
+    "--disable-pip-version-check",
+    dest="disable_pip_version_check",
+    action="store_true",
+    default=False,
+    help="Don't periodically check PyPI to determine whether a new version "
+    "of pip is available for download. Implied with --no-index.",
+)
+
+root_user_action: Callable[..., Option] = partial(
+    Option,
+    "--root-user-action",
+    dest="root_user_action",
+    default="warn",
+    choices=["warn", "ignore"],
+    help="Action if pip is run as a root user. By default, a warning message is shown.",
+)
+
+
+def _handle_merge_hash(
+    option: Option, opt_str: str, value: str, parser: OptionParser
+) -> None:
+    """Given a value spelled "algo:digest", append the digest to a list
+    pointed to in a dict by the algo name."""
+    if not parser.values.hashes:
+        parser.values.hashes = {}
+    try:
+        algo, digest = value.split(":", 1)
+    except ValueError:
+        parser.error(
+            "Arguments to {} must be a hash name "  # noqa
+            "followed by a value, like --hash=sha256:"
+            "abcde...".format(opt_str)
+        )
+    if algo not in STRONG_HASHES:
+        parser.error(
+            "Allowed hash algorithms for {} are {}.".format(  # noqa
+                opt_str, ", ".join(STRONG_HASHES)
+            )
+        )
+    parser.values.hashes.setdefault(algo, []).append(digest)
+
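+# Illustration of the merge behaviour (digests shortened and hypothetical):
+#
+#     pip install foo --hash=sha256:aaa... --hash=sha256:bbb... \
+#         --hash=sha512:ccc...
+#
+# leaves parser.values.hashes == {"sha256": ["aaa...", "bbb..."],
+#                                 "sha512": ["ccc..."]}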
+
+hash: Callable[..., Option] = partial(
+    Option,
+    "--hash",
+    # Hash values eventually end up in InstallRequirement.hashes due to
+    # __dict__ copying in process_line().
+    dest="hashes",
+    action="callback",
+    callback=_handle_merge_hash,
+    type="string",
+    help="Verify that the package's archive matches this "
+    "hash before installing. Example: --hash=sha256:abcdef...",
+)
+
+
+require_hashes: Callable[..., Option] = partial(
+    Option,
+    "--require-hashes",
+    dest="require_hashes",
+    action="store_true",
+    default=False,
+    help="Require a hash to check each requirement against, for "
+    "repeatable installs. This option is implied when any package in a "
+    "requirements file has a --hash option.",
+)
+
+
+list_path: Callable[..., Option] = partial(
+    PipOption,
+    "--path",
+    dest="path",
+    type="path",
+    action="append",
+    help="Restrict to the specified installation path for listing "
+    "packages (can be used multiple times).",
+)
+
+
+def check_list_path_option(options: Values) -> None:
+    if options.path and (options.user or options.local):
+        raise CommandError("Cannot combine '--path' with '--user' or '--local'")
+
+
+list_exclude: Callable[..., Option] = partial(
+    PipOption,
+    "--exclude",
+    dest="excludes",
+    action="append",
+    metavar="package",
+    type="package_name",
+    help="Exclude specified package from the output",
+)
+
+
+no_python_version_warning: Callable[..., Option] = partial(
+    Option,
+    "--no-python-version-warning",
+    dest="no_python_version_warning",
+    action="store_true",
+    default=False,
+    help="Silence deprecation warnings for upcoming unsupported Pythons.",
+)
+
+
+use_new_feature: Callable[..., Option] = partial(
+    Option,
+    "--use-feature",
+    dest="features_enabled",
+    metavar="feature",
+    action="append",
+    default=[],
+    choices=[
+        "fast-deps",
+        "truststore",
+        "no-binary-enable-wheel-cache",
+    ],
+    help="Enable new functionality, that may be backward incompatible.",
+)
+
+use_deprecated_feature: Callable[..., Option] = partial(
+    Option,
+    "--use-deprecated",
+    dest="deprecated_features_enabled",
+    metavar="feature",
+    action="append",
+    default=[],
+    choices=[
+        "legacy-resolver",
+    ],
+    help=("Enable deprecated functionality, that will be removed in the future."),
+)
+
+
+##########
+# groups #
+##########
+
+general_group: Dict[str, Any] = {
+    "name": "General Options",
+    "options": [
+        help_,
+        debug_mode,
+        isolated_mode,
+        require_virtualenv,
+        python,
+        verbose,
+        version,
+        quiet,
+        log,
+        no_input,
+        proxy,
+        retries,
+        timeout,
+        exists_action,
+        trusted_host,
+        cert,
+        client_cert,
+        cache_dir,
+        no_cache,
+        disable_pip_version_check,
+        no_color,
+        no_python_version_warning,
+        use_new_feature,
+        use_deprecated_feature,
+    ],
+}
+
+index_group: Dict[str, Any] = {
+    "name": "Package Index Options",
+    "options": [
+        index_url,
+        extra_index_url,
+        no_index,
+        find_links,
+    ],
+}
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/cli/command_context.py b/venv/lib/python3.9/site-packages/pip/_internal/cli/command_context.py
new file mode 100644
index 0000000..139995a
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/cli/command_context.py
@@ -0,0 +1,27 @@
+from contextlib import ExitStack, contextmanager
+from typing import ContextManager, Generator, TypeVar
+
+_T = TypeVar("_T", covariant=True)
+
+
+class CommandContextMixIn:
+    def __init__(self) -> None:
+        super().__init__()
+        self._in_main_context = False
+        self._main_context = ExitStack()
+
+    @contextmanager
+    def main_context(self) -> Generator[None, None, None]:
+        assert not self._in_main_context
+
+        self._in_main_context = True
+        try:
+            with self._main_context:
+                yield
+        finally:
+            self._in_main_context = False
+
+    def enter_context(self, context_provider: ContextManager[_T]) -> _T:
+        assert self._in_main_context
+
+        return self._main_context.enter_context(context_provider)
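+
+
+# Usage sketch (hypothetical command class):
+#
+#     class MyCommand(CommandContextMixIn):
+#         def run(self) -> None:
+#             with self.main_context():
+#                 f = self.enter_context(open("pip.log"))
+#                 ...  # f is closed automatically when main_context() exits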
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/cli/main.py b/venv/lib/python3.9/site-packages/pip/_internal/cli/main.py
new file mode 100644
index 0000000..0e31221
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/cli/main.py
@@ -0,0 +1,70 @@
+"""Primary application entrypoint.
+"""
+import locale
+import logging
+import os
+import sys
+from typing import List, Optional
+
+from pip._internal.cli.autocompletion import autocomplete
+from pip._internal.cli.main_parser import parse_command
+from pip._internal.commands import create_command
+from pip._internal.exceptions import PipError
+from pip._internal.utils import deprecation
+
+logger = logging.getLogger(__name__)
+
+
+# Do not import and use main() directly! Using it directly is actively
+# discouraged by pip's maintainers. The name, location and behavior of
+# this function are subject to change, so calling it directly is not
+# portable across different pip versions.
+
+# In addition, running pip in-process is unsupported and unsafe. This is
+# elaborated in detail at
+# https://pip.pypa.io/en/stable/user_guide/#using-pip-from-your-program.
+# That document also provides suggestions that should work for nearly
+# all users that are considering importing and using main() directly.
+
+# However, we know that certain users will still want to invoke pip
+# in-process. If you understand and accept the implications of using pip
+# in an unsupported manner, the best approach is to use runpy to avoid
+# depending on the exact location of this entry point.
+
+# The following example shows how to use runpy to invoke pip in that
+# case:
+#
+#     sys.argv = ["pip", your, args, here]
+#     runpy.run_module("pip", run_name="__main__")
+#
+# Note that this will exit the process after running, unlike a direct
+# call to main. As it is not safe to do any processing after calling
+# main, this should not be an issue in practice.
+
+
+def main(args: Optional[List[str]] = None) -> int:
+    if args is None:
+        args = sys.argv[1:]
+
+    # Configure our deprecation warnings to be sent through loggers
+    deprecation.install_warning_logger()
+
+    autocomplete()
+
+    try:
+        cmd_name, cmd_args = parse_command(args)
+    except PipError as exc:
+        sys.stderr.write(f"ERROR: {exc}")
+        sys.stderr.write(os.linesep)
+        sys.exit(1)
+
+    # Needed for locale.getpreferredencoding(False) to work
+    # in pip._internal.utils.encoding.auto_decode
+    try:
+        locale.setlocale(locale.LC_ALL, "")
+    except locale.Error as e:
+        # setlocale can apparently crash if locales are uninitialized
+        logger.debug("Ignoring error %s when setting locale", e)
+    command = create_command(cmd_name, isolated=("--isolated" in cmd_args))
+
+    return command.main(cmd_args)
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/cli/main_parser.py b/venv/lib/python3.9/site-packages/pip/_internal/cli/main_parser.py
new file mode 100644
index 0000000..5ade356
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/cli/main_parser.py
@@ -0,0 +1,134 @@
+"""A single place for constructing and exposing the main parser
+"""
+
+import os
+import subprocess
+import sys
+from typing import List, Optional, Tuple
+
+from pip._internal.build_env import get_runnable_pip
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
+from pip._internal.commands import commands_dict, get_similar_commands
+from pip._internal.exceptions import CommandError
+from pip._internal.utils.misc import get_pip_version, get_prog
+
+__all__ = ["create_main_parser", "parse_command"]
+
+
+def create_main_parser() -> ConfigOptionParser:
+    """Creates and returns the main parser for pip's CLI"""
+
+    parser = ConfigOptionParser(
+        usage="\n%prog <command> [options]",
+        add_help_option=False,
+        formatter=UpdatingDefaultsHelpFormatter(),
+        name="global",
+        prog=get_prog(),
+    )
+    parser.disable_interspersed_args()
+
+    parser.version = get_pip_version()
+
+    # add the general options
+    gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser)
+    parser.add_option_group(gen_opts)
+
+    # so the help formatter knows
+    parser.main = True  # type: ignore
+
+    # create command listing for description
+    description = [""] + [
+        f"{name:27} {command_info.summary}"
+        for name, command_info in commands_dict.items()
+    ]
+    parser.description = "\n".join(description)
+
+    return parser
+
+
+def identify_python_interpreter(python: str) -> Optional[str]:
+    # If the named file exists, use it.
+    # If it's a directory, assume it's a virtual environment and
+    # look for the environment's Python executable.
+    if os.path.exists(python):
+        if os.path.isdir(python):
+            # bin/python for Unix, Scripts/python.exe for Windows
+            # Try both in case of odd cases like cygwin.
+            for exe in ("bin/python", "Scripts/python.exe"):
+                py = os.path.join(python, exe)
+                if os.path.exists(py):
+                    return py
+        else:
+            return python
+
+    # Could not find the interpreter specified
+    return None
+
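+# Examples (illustrative paths):
+#
+#     identify_python_interpreter(".venv")             # -> ".venv/bin/python" on Unix
+#     identify_python_interpreter("/usr/bin/python3")  # -> "/usr/bin/python3"
+#     identify_python_interpreter("missing")           # -> None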
+
+def parse_command(args: List[str]) -> Tuple[str, List[str]]:
+    parser = create_main_parser()
+
+    # Note: parser calls disable_interspersed_args(), so the result of this
+    # call is to split the initial args into the general options before the
+    # subcommand and everything else.
+    # For example:
+    #  args: ['--timeout=5', 'install', '--user', 'INITools']
+    #  general_options: ['--timeout=5']
+    #  args_else: ['install', '--user', 'INITools']
+    general_options, args_else = parser.parse_args(args)
+
+    # --python
+    if general_options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ:
+        # Re-invoke pip using the specified Python interpreter
+        interpreter = identify_python_interpreter(general_options.python)
+        if interpreter is None:
+            raise CommandError(
+                f"Could not locate Python interpreter {general_options.python}"
+            )
+
+        pip_cmd = [
+            interpreter,
+            get_runnable_pip(),
+        ]
+        pip_cmd.extend(args)
+
+        # Set a flag so the child doesn't re-invoke itself, causing
+        # an infinite loop.
+        os.environ["_PIP_RUNNING_IN_SUBPROCESS"] = "1"
+        returncode = 0
+        try:
+            proc = subprocess.run(pip_cmd)
+            returncode = proc.returncode
+        except (subprocess.SubprocessError, OSError) as exc:
+            raise CommandError(f"Failed to run pip under {interpreter}: {exc}")
+        sys.exit(returncode)
+
+    # --version
+    if general_options.version:
+        sys.stdout.write(parser.version)
+        sys.stdout.write(os.linesep)
+        sys.exit()
+
+    # pip || pip help -> print_help()
+    if not args_else or (args_else[0] == "help" and len(args_else) == 1):
+        parser.print_help()
+        sys.exit()
+
+    # the subcommand name
+    cmd_name = args_else[0]
+
+    if cmd_name not in commands_dict:
+        guess = get_similar_commands(cmd_name)
+
+        msg = [f'unknown command "{cmd_name}"']
+        if guess:
+            msg.append(f'maybe you meant "{guess}"')
+
+        raise CommandError(" - ".join(msg))
+
+    # all the args without the subcommand
+    cmd_args = args[:]
+    cmd_args.remove(cmd_name)
+
+    return cmd_name, cmd_args
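+
+
+# A sketch of the resulting split, mirroring the example in parse_command:
+#
+#     parse_command(["--timeout=5", "install", "--user", "INITools"])
+#     # -> ("install", ["--timeout=5", "--user", "INITools"])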
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/cli/parser.py b/venv/lib/python3.9/site-packages/pip/_internal/cli/parser.py
new file mode 100644
index 0000000..c762cf2
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/cli/parser.py
@@ -0,0 +1,294 @@
+"""Base option parser setup"""
+
+import logging
+import optparse
+import shutil
+import sys
+import textwrap
+from contextlib import suppress
+from typing import Any, Dict, Generator, List, Tuple
+
+from pip._internal.cli.status_codes import UNKNOWN_ERROR
+from pip._internal.configuration import Configuration, ConfigurationError
+from pip._internal.utils.misc import redact_auth_from_url, strtobool
+
+logger = logging.getLogger(__name__)
+
+
+class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
+    """A prettier/less verbose help formatter for optparse."""
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        # help position must be aligned with __init__.parseopts.description
+        kwargs["max_help_position"] = 30
+        kwargs["indent_increment"] = 1
+        kwargs["width"] = shutil.get_terminal_size()[0] - 2
+        super().__init__(*args, **kwargs)
+
+    def format_option_strings(self, option: optparse.Option) -> str:
+        return self._format_option_strings(option)
+
+    def _format_option_strings(
+        self, option: optparse.Option, mvarfmt: str = " <{}>", optsep: str = ", "
+    ) -> str:
+        """
+        Return a comma-separated list of option strings and metavars.
+
+        :param option:  the Option instance, with opts e.g. ('-f', '--format')
+        :param mvarfmt: metavar format string
+        :param optsep:  separator
+        """
+        opts = []
+
+        if option._short_opts:
+            opts.append(option._short_opts[0])
+        if option._long_opts:
+            opts.append(option._long_opts[0])
+        if len(opts) > 1:
+            opts.insert(1, optsep)
+
+        if option.takes_value():
+            assert option.dest is not None
+            metavar = option.metavar or option.dest.lower()
+            opts.append(mvarfmt.format(metavar.lower()))
+
+        return "".join(opts)
+
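+    # Example: Option("-f", "--format", dest="format") renders here as
+    # "-f, --format <format>"; with metavar="FMT" it would render as
+    # "-f, --format <fmt>" (illustrative option).
+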
+    def format_heading(self, heading: str) -> str:
+        if heading == "Options":
+            return ""
+        return heading + ":\n"
+
+    def format_usage(self, usage: str) -> str:
+        """
+        Ensure there is only one newline between usage and the first heading
+        if there is no description.
+        """
+        msg = "\nUsage: {}\n".format(self.indent_lines(textwrap.dedent(usage), "  "))
+        return msg
+
+    def format_description(self, description: str) -> str:
+        # leave full control over description to us
+        if description:
+            if hasattr(self.parser, "main"):
+                label = "Commands"
+            else:
+                label = "Description"
+            # some doc strings have initial newlines, some don't
+            description = description.lstrip("\n")
+            # some doc strings have final newlines and spaces, some don't
+            description = description.rstrip()
+            # dedent, then reindent
+            description = self.indent_lines(textwrap.dedent(description), "  ")
+            description = f"{label}:\n{description}\n"
+            return description
+        else:
+            return ""
+
+    def format_epilog(self, epilog: str) -> str:
+        # leave full control over epilog to us
+        if epilog:
+            return epilog
+        else:
+            return ""
+
+    def indent_lines(self, text: str, indent: str) -> str:
+        new_lines = [indent + line for line in text.split("\n")]
+        return "\n".join(new_lines)
+
+
+class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
+    """Custom help formatter for use in ConfigOptionParser.
+
+    This updates the defaults before expanding them, allowing
+    them to show up correctly in the help listing.
+
+    It also redacts auth from URL-type options.
+    """
+
+    def expand_default(self, option: optparse.Option) -> str:
+        default_values = None
+        if self.parser is not None:
+            assert isinstance(self.parser, ConfigOptionParser)
+            self.parser._update_defaults(self.parser.defaults)
+            assert option.dest is not None
+            default_values = self.parser.defaults.get(option.dest)
+        help_text = super().expand_default(option)
+
+        if default_values and option.metavar == "URL":
+            if isinstance(default_values, str):
+                default_values = [default_values]
+
+            # If it's not a list, clear it so the help text is returned as-is
+            if not isinstance(default_values, list):
+                default_values = []
+
+            for val in default_values:
+                help_text = help_text.replace(val, redact_auth_from_url(val))
+
+        return help_text
+
+
+class CustomOptionParser(optparse.OptionParser):
+    def insert_option_group(
+        self, idx: int, *args: Any, **kwargs: Any
+    ) -> optparse.OptionGroup:
+        """Insert an OptionGroup at a given position."""
+        group = self.add_option_group(*args, **kwargs)
+
+        self.option_groups.pop()
+        self.option_groups.insert(idx, group)
+
+        return group
+
+    @property
+    def option_list_all(self) -> List[optparse.Option]:
+        """Get a list of all options, including those in option groups."""
+        res = self.option_list[:]
+        for i in self.option_groups:
+            res.extend(i.option_list)
+
+        return res
+
+
+class ConfigOptionParser(CustomOptionParser):
+    """Custom option parser which updates its defaults by checking the
+    configuration files and environment variables"""
+
+    def __init__(
+        self,
+        *args: Any,
+        name: str,
+        isolated: bool = False,
+        **kwargs: Any,
+    ) -> None:
+        self.name = name
+        self.config = Configuration(isolated)
+
+        assert self.name
+        super().__init__(*args, **kwargs)
+
+    def check_default(self, option: optparse.Option, key: str, val: Any) -> Any:
+        try:
+            return option.check_value(key, val)
+        except optparse.OptionValueError as exc:
+            print(f"An error occurred during configuration: {exc}")
+            sys.exit(3)
+
+    def _get_ordered_configuration_items(
+        self,
+    ) -> Generator[Tuple[str, Any], None, None]:
+        # Configuration gives keys in an unordered manner. Order them.
+        override_order = ["global", self.name, ":env:"]
+
+        # Pool the options into different groups
+        section_items: Dict[str, List[Tuple[str, Any]]] = {
+            name: [] for name in override_order
+        }
+        for section_key, val in self.config.items():
+            # ignore empty values
+            if not val:
+                logger.debug(
+                    "Ignoring configuration key '%s' as it's value is empty.",
+                    section_key,
+                )
+                continue
+
+            section, key = section_key.split(".", 1)
+            if section in override_order:
+                section_items[section].append((key, val))
+
+        # Yield the groups in override order
+        for section in override_order:
+            for key, val in section_items[section]:
+                yield key, val
+
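+    # Override-order sketch: a "timeout" key set in [global] is yielded first,
+    # then a command-specific value (e.g. [install]), then PIP_TIMEOUT from
+    # the environment (":env:"); _update_defaults() applies them in that
+    # order, so later sources win.
+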
+    def _update_defaults(self, defaults: Dict[str, Any]) -> Dict[str, Any]:
+        """Updates the given defaults with values from the config files and
+        the environ. Does a little special handling for certain types of
+        options (lists)."""
+
+        # Accumulate complex default state.
+        self.values = optparse.Values(self.defaults)
+        late_eval = set()
+        # Then set the options with those values
+        for key, val in self._get_ordered_configuration_items():
+            # '--' because configuration supports only long names
+            option = self.get_option("--" + key)
+
+            # Ignore options not present in this parser. E.g. non-globals put
+            # in [global] by users that want them to apply to all applicable
+            # commands.
+            if option is None:
+                continue
+
+            assert option.dest is not None
+
+            if option.action in ("store_true", "store_false"):
+                try:
+                    val = strtobool(val)
+                except ValueError:
+                    self.error(
+                        "{} is not a valid value for {} option, "  # noqa
+                        "please specify a boolean value like yes/no, "
+                        "true/false or 1/0 instead.".format(val, key)
+                    )
+            elif option.action == "count":
+                with suppress(ValueError):
+                    val = strtobool(val)
+                with suppress(ValueError):
+                    val = int(val)
+                if not isinstance(val, int) or val < 0:
+                    self.error(
+                        "{} is not a valid value for {} option, "  # noqa
+                        "please instead specify either a non-negative integer "
+                        "or a boolean value like yes/no or false/true "
+                        "which is equivalent to 1/0.".format(val, key)
+                    )
+            elif option.action == "append":
+                val = val.split()
+                val = [self.check_default(option, key, v) for v in val]
+            elif option.action == "callback":
+                assert option.callback is not None
+                late_eval.add(option.dest)
+                opt_str = option.get_opt_string()
+                val = option.convert_value(opt_str, val)
+                # From take_action
+                args = option.callback_args or ()
+                kwargs = option.callback_kwargs or {}
+                option.callback(option, opt_str, val, self, *args, **kwargs)
+            else:
+                val = self.check_default(option, key, val)
+
+            defaults[option.dest] = val
+
+        for key in late_eval:
+            defaults[key] = getattr(self.values, key)
+        self.values = None
+        return defaults
+
+    def get_default_values(self) -> optparse.Values:
+        """Overriding to make updating the defaults after instantiation of
+        the option parser possible; _update_defaults() does the dirty work."""
+        if not self.process_default_values:
+            # Old, pre-Optik 1.5 behaviour.
+            return optparse.Values(self.defaults)
+
+        # Load the configuration, or error out in case of an error
+        try:
+            self.config.load()
+        except ConfigurationError as err:
+            self.exit(UNKNOWN_ERROR, str(err))
+
+        defaults = self._update_defaults(self.defaults.copy())  # ours
+        for option in self._get_all_options():
+            assert option.dest is not None
+            default = defaults.get(option.dest)
+            if isinstance(default, str):
+                opt_str = option.get_opt_string()
+                defaults[option.dest] = option.check_value(opt_str, default)
+        return optparse.Values(defaults)
+
+    def error(self, msg: str) -> None:
+        self.print_usage(sys.stderr)
+        self.exit(UNKNOWN_ERROR, f"{msg}\n")
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/cli/progress_bars.py b/venv/lib/python3.9/site-packages/pip/_internal/cli/progress_bars.py
new file mode 100644
index 0000000..0ad1403
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/cli/progress_bars.py
@@ -0,0 +1,68 @@
+import functools
+from typing import Callable, Generator, Iterable, Iterator, Optional, Tuple
+
+from pip._vendor.rich.progress import (
+    BarColumn,
+    DownloadColumn,
+    FileSizeColumn,
+    Progress,
+    ProgressColumn,
+    SpinnerColumn,
+    TextColumn,
+    TimeElapsedColumn,
+    TimeRemainingColumn,
+    TransferSpeedColumn,
+)
+
+from pip._internal.utils.logging import get_indentation
+
+DownloadProgressRenderer = Callable[[Iterable[bytes]], Iterator[bytes]]
+
+
+def _rich_progress_bar(
+    iterable: Iterable[bytes],
+    *,
+    bar_type: str,
+    size: int,
+) -> Generator[bytes, None, None]:
+    assert bar_type == "on", "This should only be used in the default mode."
+
+    if not size:
+        total = float("inf")
+        columns: Tuple[ProgressColumn, ...] = (
+            TextColumn("[progress.description]{task.description}"),
+            SpinnerColumn("line", speed=1.5),
+            FileSizeColumn(),
+            TransferSpeedColumn(),
+            TimeElapsedColumn(),
+        )
+    else:
+        total = size
+        columns = (
+            TextColumn("[progress.description]{task.description}"),
+            BarColumn(),
+            DownloadColumn(),
+            TransferSpeedColumn(),
+            TextColumn("eta"),
+            TimeRemainingColumn(),
+        )
+
+    progress = Progress(*columns, refresh_per_second=30)
+    task_id = progress.add_task(" " * (get_indentation() + 2), total=total)
+    with progress:
+        for chunk in iterable:
+            yield chunk
+            progress.update(task_id, advance=len(chunk))
+
+
+def get_download_progress_renderer(
+    *, bar_type: str, size: Optional[int] = None
+) -> DownloadProgressRenderer:
+    """Get an object that can be used to render the download progress.
+
+    Returns a callable that takes an iterable to "wrap".
+    """
+    if bar_type == "on":
+        return functools.partial(_rich_progress_bar, bar_type=bar_type, size=size)
+    else:
+        return iter  # no-op, when passed an iterator
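+
+
+# Usage sketch (iter_chunks and write_to_disk are hypothetical):
+#
+#     renderer = get_download_progress_renderer(bar_type="on", size=1024)
+#     for chunk in renderer(iter_chunks()):
+#         write_to_disk(chunk)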
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/cli/req_command.py b/venv/lib/python3.9/site-packages/pip/_internal/cli/req_command.py
new file mode 100644
index 0000000..1044809
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/cli/req_command.py
@@ -0,0 +1,502 @@
+"""Contains the Command base classes that depend on PipSession.
+
+The classes in this module are in a separate module so the commands not
+needing download / PackageFinder capability don't unnecessarily import the
+PackageFinder machinery and all its vendored dependencies, etc.
+"""
+
+import logging
+import os
+import sys
+from functools import partial
+from optparse import Values
+from typing import TYPE_CHECKING, Any, List, Optional, Tuple
+
+from pip._internal.cache import WheelCache
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.command_context import CommandContextMixIn
+from pip._internal.exceptions import CommandError, PreviousBuildDirError
+from pip._internal.index.collector import LinkCollector
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.models.selection_prefs import SelectionPreferences
+from pip._internal.models.target_python import TargetPython
+from pip._internal.network.session import PipSession
+from pip._internal.operations.build.build_tracker import BuildTracker
+from pip._internal.operations.prepare import RequirementPreparer
+from pip._internal.req.constructors import (
+    install_req_from_editable,
+    install_req_from_line,
+    install_req_from_parsed_requirement,
+    install_req_from_req_string,
+)
+from pip._internal.req.req_file import parse_requirements
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.resolution.base import BaseResolver
+from pip._internal.self_outdated_check import pip_self_version_check
+from pip._internal.utils.temp_dir import (
+    TempDirectory,
+    TempDirectoryTypeRegistry,
+    tempdir_kinds,
+)
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+if TYPE_CHECKING:
+    from ssl import SSLContext
+
+logger = logging.getLogger(__name__)
+
+
+def _create_truststore_ssl_context() -> Optional["SSLContext"]:
+    if sys.version_info < (3, 10):
+        raise CommandError("The truststore feature is only available for Python 3.10+")
+
+    try:
+        import ssl
+    except ImportError:
+        logger.warning("Disabling truststore since ssl support is missing")
+        return None
+
+    try:
+        import truststore
+    except ImportError:
+        raise CommandError(
+            "To use the truststore feature, 'truststore' must be installed into "
+            "pip's current environment."
+        )
+
+    return truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
+
+
+class SessionCommandMixin(CommandContextMixIn):
+
+    """
+    A class mixin for command classes needing _build_session().
+    """
+
+    def __init__(self) -> None:
+        super().__init__()
+        self._session: Optional[PipSession] = None
+
+    @classmethod
+    def _get_index_urls(cls, options: Values) -> Optional[List[str]]:
+        """Return a list of index urls from user-provided options."""
+        index_urls = []
+        if not getattr(options, "no_index", False):
+            url = getattr(options, "index_url", None)
+            if url:
+                index_urls.append(url)
+        urls = getattr(options, "extra_index_urls", None)
+        if urls:
+            index_urls.extend(urls)
+        # Return None rather than an empty list
+        return index_urls or None
+
+    def get_default_session(self, options: Values) -> PipSession:
+        """Get a default-managed session."""
+        if self._session is None:
+            self._session = self.enter_context(self._build_session(options))
+            # there's no type annotation on requests.Session, so it's
+            # automatically ContextManager[Any] and self._session becomes Any,
+            # then https://github.com/python/mypy/issues/7696 kicks in
+            assert self._session is not None
+        return self._session
+
+    def _build_session(
+        self,
+        options: Values,
+        retries: Optional[int] = None,
+        timeout: Optional[int] = None,
+        fallback_to_certifi: bool = False,
+    ) -> PipSession:
+        cache_dir = options.cache_dir
+        assert not cache_dir or os.path.isabs(cache_dir)
+
+        if "truststore" in options.features_enabled:
+            try:
+                ssl_context = _create_truststore_ssl_context()
+            except Exception:
+                if not fallback_to_certifi:
+                    raise
+                ssl_context = None
+        else:
+            ssl_context = None
+
+        session = PipSession(
+            cache=os.path.join(cache_dir, "http") if cache_dir else None,
+            retries=retries if retries is not None else options.retries,
+            trusted_hosts=options.trusted_hosts,
+            index_urls=self._get_index_urls(options),
+            ssl_context=ssl_context,
+        )
+
+        # Handle custom ca-bundles from the user
+        if options.cert:
+            session.verify = options.cert
+
+        # Handle SSL client certificate
+        if options.client_cert:
+            session.cert = options.client_cert
+
+        # Handle timeouts
+        if options.timeout or timeout:
+            session.timeout = timeout if timeout is not None else options.timeout
+
+        # Handle configured proxies
+        if options.proxy:
+            session.proxies = {
+                "http": options.proxy,
+                "https": options.proxy,
+            }
+
+        # Determine if we can prompt the user for authentication or not
+        session.auth.prompting = not options.no_input
+
+        return session
+
+
+class IndexGroupCommand(Command, SessionCommandMixin):
+
+    """
+    Abstract base class for commands with the index_group options.
+
+    This also corresponds to the commands that permit the pip version check.
+    """
+
+    def handle_pip_version_check(self, options: Values) -> None:
+        """
+        Do the pip version check if not disabled.
+
+        This overrides the default behavior of not doing the check.
+        """
+        # Make sure the index_group options are present.
+        assert hasattr(options, "no_index")
+
+        if options.disable_pip_version_check or options.no_index:
+            return
+
+        # Otherwise, check if we're using the latest version of pip available.
+        session = self._build_session(
+            options,
+            retries=0,
+            timeout=min(5, options.timeout),
+            # This is set to ensure the function does not fail when truststore is
+            # specified in use-feature but cannot be loaded. This usually raises a
+            # CommandError and shows a nice user-facing error, but this function is not
+            # called in that try-except block.
+            fallback_to_certifi=True,
+        )
+        with session:
+            pip_self_version_check(session, options)
+
+
+KEEPABLE_TEMPDIR_TYPES = [
+    tempdir_kinds.BUILD_ENV,
+    tempdir_kinds.EPHEM_WHEEL_CACHE,
+    tempdir_kinds.REQ_BUILD,
+]
+
+
+def warn_if_run_as_root() -> None:
+    """Output a warning for sudo users on Unix.
+
+    In a virtual environment, sudo pip still writes to the virtualenv.
+    On Windows, users may run pip as Administrator without issues.
+    This warning only applies to Unix root users outside of virtualenv.
+    """
+    if running_under_virtualenv():
+        return
+    if not hasattr(os, "getuid"):
+        return
+    # On Windows, there are no "system managed" Python packages. Installing as
+    # Administrator via pip is the correct way of updating system environments.
+    #
+    # We choose sys.platform over utils.compat.WINDOWS here to enable Mypy platform
+    # checks: https://mypy.readthedocs.io/en/stable/common_issues.html
+    if sys.platform == "win32" or sys.platform == "cygwin":
+        return
+
+    if os.getuid() != 0:
+        return
+
+    logger.warning(
+        "Running pip as the 'root' user can result in broken permissions and "
+        "conflicting behaviour with the system package manager. "
+        "It is recommended to use a virtual environment instead: "
+        "https://pip.pypa.io/warnings/venv"
+    )
+
+
+def with_cleanup(func: Any) -> Any:
+    """Decorator for common logic related to managing temporary
+    directories.
+    """
+
+    def configure_tempdir_registry(registry: TempDirectoryTypeRegistry) -> None:
+        for t in KEEPABLE_TEMPDIR_TYPES:
+            registry.set_delete(t, False)
+
+    def wrapper(
+        self: RequirementCommand, options: Values, args: List[Any]
+    ) -> Optional[int]:
+        assert self.tempdir_registry is not None
+        if options.no_clean:
+            configure_tempdir_registry(self.tempdir_registry)
+
+        try:
+            return func(self, options, args)
+        except PreviousBuildDirError:
+            # This kind of conflict can occur when the user passes an explicit
+            # build directory with a pre-existing folder. In that case we do
+            # not want to accidentally remove it.
+            configure_tempdir_registry(self.tempdir_registry)
+            raise
+
+    return wrapper
+
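+# Usage sketch (hypothetical subclass of RequirementCommand, defined below):
+#
+#     class FetchCommand(RequirementCommand):
+#         @with_cleanup
+#         def run(self, options: Values, args: List[str]) -> int:
+#             ...  # KEEPABLE_TEMPDIR_TYPES are kept on --no-clean or on a
+#                  # PreviousBuildDirError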
+
+class RequirementCommand(IndexGroupCommand):
+    def __init__(self, *args: Any, **kw: Any) -> None:
+        super().__init__(*args, **kw)
+
+        self.cmd_opts.add_option(cmdoptions.no_clean())
+
+    @staticmethod
+    def determine_resolver_variant(options: Values) -> str:
+        """Determines which resolver should be used, based on the given options."""
+        if "legacy-resolver" in options.deprecated_features_enabled:
+            return "legacy"
+
+        return "2020-resolver"
+
+    @classmethod
+    def make_requirement_preparer(
+        cls,
+        temp_build_dir: TempDirectory,
+        options: Values,
+        build_tracker: BuildTracker,
+        session: PipSession,
+        finder: PackageFinder,
+        use_user_site: bool,
+        download_dir: Optional[str] = None,
+        verbosity: int = 0,
+    ) -> RequirementPreparer:
+        """
+        Create a RequirementPreparer instance for the given parameters.
+        """
+        temp_build_dir_path = temp_build_dir.path
+        assert temp_build_dir_path is not None
+
+        resolver_variant = cls.determine_resolver_variant(options)
+        if resolver_variant == "2020-resolver":
+            lazy_wheel = "fast-deps" in options.features_enabled
+            if lazy_wheel:
+                logger.warning(
+                    "pip is using lazily downloaded wheels using HTTP "
+                    "range requests to obtain dependency information. "
+                    "This experimental feature is enabled through "
+                    "--use-feature=fast-deps and it is not ready for "
+                    "production."
+                )
+        else:
+            lazy_wheel = False
+            if "fast-deps" in options.features_enabled:
+                logger.warning(
+                    "fast-deps has no effect when used with the legacy resolver."
+                )
+
+        return RequirementPreparer(
+            build_dir=temp_build_dir_path,
+            src_dir=options.src_dir,
+            download_dir=download_dir,
+            build_isolation=options.build_isolation,
+            check_build_deps=options.check_build_deps,
+            build_tracker=build_tracker,
+            session=session,
+            progress_bar=options.progress_bar,
+            finder=finder,
+            require_hashes=options.require_hashes,
+            use_user_site=use_user_site,
+            lazy_wheel=lazy_wheel,
+            verbosity=verbosity,
+        )
+
+    @classmethod
+    def make_resolver(
+        cls,
+        preparer: RequirementPreparer,
+        finder: PackageFinder,
+        options: Values,
+        wheel_cache: Optional[WheelCache] = None,
+        use_user_site: bool = False,
+        ignore_installed: bool = True,
+        ignore_requires_python: bool = False,
+        force_reinstall: bool = False,
+        upgrade_strategy: str = "to-satisfy-only",
+        use_pep517: Optional[bool] = None,
+        py_version_info: Optional[Tuple[int, ...]] = None,
+    ) -> BaseResolver:
+        """
+        Create a Resolver instance for the given parameters.
+        """
+        make_install_req = partial(
+            install_req_from_req_string,
+            isolated=options.isolated_mode,
+            use_pep517=use_pep517,
+            config_settings=getattr(options, "config_settings", None),
+        )
+        resolver_variant = cls.determine_resolver_variant(options)
+        # The long import name and duplicated invocation are needed to convince
+        # Mypy to typecheck correctly. Otherwise it would complain about the
+        # "Resolver" class being redefined.
+        if resolver_variant == "2020-resolver":
+            import pip._internal.resolution.resolvelib.resolver
+
+            return pip._internal.resolution.resolvelib.resolver.Resolver(
+                preparer=preparer,
+                finder=finder,
+                wheel_cache=wheel_cache,
+                make_install_req=make_install_req,
+                use_user_site=use_user_site,
+                ignore_dependencies=options.ignore_dependencies,
+                ignore_installed=ignore_installed,
+                ignore_requires_python=ignore_requires_python,
+                force_reinstall=force_reinstall,
+                upgrade_strategy=upgrade_strategy,
+                py_version_info=py_version_info,
+            )
+        import pip._internal.resolution.legacy.resolver
+
+        return pip._internal.resolution.legacy.resolver.Resolver(
+            preparer=preparer,
+            finder=finder,
+            wheel_cache=wheel_cache,
+            make_install_req=make_install_req,
+            use_user_site=use_user_site,
+            ignore_dependencies=options.ignore_dependencies,
+            ignore_installed=ignore_installed,
+            ignore_requires_python=ignore_requires_python,
+            force_reinstall=force_reinstall,
+            upgrade_strategy=upgrade_strategy,
+            py_version_info=py_version_info,
+        )
+
+    def get_requirements(
+        self,
+        args: List[str],
+        options: Values,
+        finder: PackageFinder,
+        session: PipSession,
+    ) -> List[InstallRequirement]:
+        """
+        Parse command-line arguments into the corresponding requirements.
+        """
+        requirements: List[InstallRequirement] = []
+        for filename in options.constraints:
+            for parsed_req in parse_requirements(
+                filename,
+                constraint=True,
+                finder=finder,
+                options=options,
+                session=session,
+            ):
+                req_to_add = install_req_from_parsed_requirement(
+                    parsed_req,
+                    isolated=options.isolated_mode,
+                    user_supplied=False,
+                )
+                requirements.append(req_to_add)
+
+        for req in args:
+            req_to_add = install_req_from_line(
+                req,
+                None,
+                isolated=options.isolated_mode,
+                use_pep517=options.use_pep517,
+                user_supplied=True,
+                config_settings=getattr(options, "config_settings", None),
+            )
+            requirements.append(req_to_add)
+
+        for req in options.editables:
+            req_to_add = install_req_from_editable(
+                req,
+                user_supplied=True,
+                isolated=options.isolated_mode,
+                use_pep517=options.use_pep517,
+                config_settings=getattr(options, "config_settings", None),
+            )
+            requirements.append(req_to_add)
+
+        # NOTE: options.require_hashes may be set if --require-hashes is True
+        for filename in options.requirements:
+            for parsed_req in parse_requirements(
+                filename, finder=finder, options=options, session=session
+            ):
+                req_to_add = install_req_from_parsed_requirement(
+                    parsed_req,
+                    isolated=options.isolated_mode,
+                    use_pep517=options.use_pep517,
+                    user_supplied=True,
+                )
+                requirements.append(req_to_add)
+
+        # If any requirement has hash options, enable hash checking.
+        if any(req.has_hash_options for req in requirements):
+            options.require_hashes = True
+
+        if not (args or options.editables or options.requirements):
+            opts = {"name": self.name}
+            if options.find_links:
+                raise CommandError(
+                    "You must give at least one requirement to {name} "
+                    '(maybe you meant "pip {name} {links}"?)'.format(
+                        **dict(opts, links=" ".join(options.find_links))
+                    )
+                )
+            else:
+                raise CommandError(
+                    "You must give at least one requirement to {name} "
+                    '(see "pip help {name}")'.format(**opts)
+                )
+
+        return requirements
+
+    @staticmethod
+    def trace_basic_info(finder: PackageFinder) -> None:
+        """
+        Trace basic information about the provided objects.
+        """
+        # Display where finder is looking for packages
+        search_scope = finder.search_scope
+        locations = search_scope.get_formatted_locations()
+        if locations:
+            logger.info(locations)
+
+    def _build_package_finder(
+        self,
+        options: Values,
+        session: PipSession,
+        target_python: Optional[TargetPython] = None,
+        ignore_requires_python: Optional[bool] = None,
+    ) -> PackageFinder:
+        """
+        Create a package finder appropriate to this requirement command.
+
+        :param ignore_requires_python: Whether to ignore incompatible
+            "Requires-Python" values in links. Defaults to False.
+        """
+        link_collector = LinkCollector.create(session, options=options)
+        selection_prefs = SelectionPreferences(
+            allow_yanked=True,
+            format_control=options.format_control,
+            allow_all_prereleases=options.pre,
+            prefer_binary=options.prefer_binary,
+            ignore_requires_python=ignore_requires_python,
+        )
+
+        return PackageFinder.create(
+            link_collector=link_collector,
+            selection_prefs=selection_prefs,
+            target_python=target_python,
+        )
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/cli/spinners.py b/venv/lib/python3.9/site-packages/pip/_internal/cli/spinners.py
new file mode 100644
index 0000000..cf2b976
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/cli/spinners.py
@@ -0,0 +1,159 @@
+import contextlib
+import itertools
+import logging
+import sys
+import time
+from typing import IO, Generator, Optional
+
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.logging import get_indentation
+
+logger = logging.getLogger(__name__)
+
+
+class SpinnerInterface:
+    def spin(self) -> None:
+        raise NotImplementedError()
+
+    def finish(self, final_status: str) -> None:
+        raise NotImplementedError()
+
+
+class InteractiveSpinner(SpinnerInterface):
+    def __init__(
+        self,
+        message: str,
+        file: Optional[IO[str]] = None,
+        spin_chars: str = "-\\|/",
+        # Empirically, 8 updates/second looks nice
+        min_update_interval_seconds: float = 0.125,
+    ):
+        self._message = message
+        if file is None:
+            file = sys.stdout
+        self._file = file
+        self._rate_limiter = RateLimiter(min_update_interval_seconds)
+        self._finished = False
+
+        self._spin_cycle = itertools.cycle(spin_chars)
+
+        self._file.write(" " * get_indentation() + self._message + " ... ")
+        self._width = 0
+
+    def _write(self, status: str) -> None:
+        assert not self._finished
+        # Erase what we wrote before by backspacing to the beginning, writing
+        # spaces to overwrite the old text, and then backspacing again
+        backup = "\b" * self._width
+        self._file.write(backup + " " * self._width + backup)
+        # Now we have a blank slate to add our status
+        self._file.write(status)
+        self._width = len(status)
+        self._file.flush()
+        self._rate_limiter.reset()
+
+    def spin(self) -> None:
+        if self._finished:
+            return
+        if not self._rate_limiter.ready():
+            return
+        self._write(next(self._spin_cycle))
+
+    def finish(self, final_status: str) -> None:
+        if self._finished:
+            return
+        self._write(final_status)
+        self._file.write("\n")
+        self._file.flush()
+        self._finished = True
+
+
+# Used for dumb terminals, non-interactive installs (no tty), etc.
+# We still print updates occasionally (once every 60 seconds by default) to
+# act as a keep-alive for systems like Travis-CI that take lack-of-output as
+# an indication that a task has frozen.
+class NonInteractiveSpinner(SpinnerInterface):
+    def __init__(self, message: str, min_update_interval_seconds: float = 60.0) -> None:
+        self._message = message
+        self._finished = False
+        self._rate_limiter = RateLimiter(min_update_interval_seconds)
+        self._update("started")
+
+    def _update(self, status: str) -> None:
+        assert not self._finished
+        self._rate_limiter.reset()
+        logger.info("%s: %s", self._message, status)
+
+    def spin(self) -> None:
+        if self._finished:
+            return
+        if not self._rate_limiter.ready():
+            return
+        self._update("still running...")
+
+    def finish(self, final_status: str) -> None:
+        if self._finished:
+            return
+        self._update(f"finished with status '{final_status}'")
+        self._finished = True
+
+
+class RateLimiter:
+    def __init__(self, min_update_interval_seconds: float) -> None:
+        self._min_update_interval_seconds = min_update_interval_seconds
+        self._last_update: float = 0
+
+    def ready(self) -> bool:
+        now = time.time()
+        delta = now - self._last_update
+        return delta >= self._min_update_interval_seconds
+
+    def reset(self) -> None:
+        self._last_update = time.time()
+
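+# A small sketch of the rate-limiting pattern both spinners rely on
+# (the loop and draw_next_frame are hypothetical):
+#
+#     limiter = RateLimiter(0.125)
+#     while working:
+#         if limiter.ready():
+#             draw_next_frame()
+#             limiter.reset()
+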
+
+@contextlib.contextmanager
+def open_spinner(message: str) -> Generator[SpinnerInterface, None, None]:
+    # Interactive spinner goes directly to sys.stdout rather than being routed
+    # through the logging system, but it acts like it has level INFO,
+    # i.e. it's only displayed if we're at level INFO or better.
+    # Non-interactive spinner goes through the logging system, so it is always
+    # in sync with logging configuration.
+    if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO:
+        spinner: SpinnerInterface = InteractiveSpinner(message)
+    else:
+        spinner = NonInteractiveSpinner(message)
+    try:
+        with hidden_cursor(sys.stdout):
+            yield spinner
+    except KeyboardInterrupt:
+        spinner.finish("canceled")
+        raise
+    except Exception:
+        spinner.finish("error")
+        raise
+    else:
+        spinner.finish("done")
+
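+# Typical use (message and loop are illustrative):
+#
+#     with open_spinner("Building wheel for foo") as spinner:
+#         while build_in_progress():
+#             spinner.spin()
+#     # on a clean exit the spinner prints "done"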
+
+HIDE_CURSOR = "\x1b[?25l"
+SHOW_CURSOR = "\x1b[?25h"
+
+
+@contextlib.contextmanager
+def hidden_cursor(file: IO[str]) -> Generator[None, None, None]:
+    # The Windows terminal does not support the hide/show cursor ANSI codes,
+    # even via colorama. So don't even try.
+    if WINDOWS:
+        yield
+    # We don't want to clutter the output with control characters if we're
+    # writing to a file, or if the user is running with --quiet.
+    # See https://github.com/pypa/pip/issues/3418
+    elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO:
+        yield
+    else:
+        file.write(HIDE_CURSOR)
+        try:
+            yield
+        finally:
+            file.write(SHOW_CURSOR)
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/cli/status_codes.py b/venv/lib/python3.9/site-packages/pip/_internal/cli/status_codes.py
new file mode 100644
index 0000000..5e29502
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/cli/status_codes.py
@@ -0,0 +1,6 @@
+SUCCESS = 0
+ERROR = 1
+UNKNOWN_ERROR = 2
+VIRTUALENV_NOT_FOUND = 3
+PREVIOUS_BUILD_DIR_ERROR = 4
+NO_MATCHES_FOUND = 23
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/__init__.py b/venv/lib/python3.9/site-packages/pip/_internal/commands/__init__.py
new file mode 100644
index 0000000..858a410
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/__init__.py
@@ -0,0 +1,132 @@
+"""
+Package containing all pip commands
+"""
+
+import importlib
+from collections import namedtuple
+from typing import Any, Dict, Optional
+
+from pip._internal.cli.base_command import Command
+
+CommandInfo = namedtuple("CommandInfo", "module_path, class_name, summary")
+
+# This dictionary does a bunch of heavy lifting for help output:
+# - Enables avoiding additional (costly) imports for presenting `--help`.
+# - The ordering matters for help display.
+#
+# Even though the module path starts with the same "pip._internal.commands"
+# prefix, the full path makes testing easier (specifically when modifying
+# `commands_dict` in test setup / teardown).
+commands_dict: Dict[str, CommandInfo] = {
+    "install": CommandInfo(
+        "pip._internal.commands.install",
+        "InstallCommand",
+        "Install packages.",
+    ),
+    "download": CommandInfo(
+        "pip._internal.commands.download",
+        "DownloadCommand",
+        "Download packages.",
+    ),
+    "uninstall": CommandInfo(
+        "pip._internal.commands.uninstall",
+        "UninstallCommand",
+        "Uninstall packages.",
+    ),
+    "freeze": CommandInfo(
+        "pip._internal.commands.freeze",
+        "FreezeCommand",
+        "Output installed packages in requirements format.",
+    ),
+    "inspect": CommandInfo(
+        "pip._internal.commands.inspect",
+        "InspectCommand",
+        "Inspect the python environment.",
+    ),
+    "list": CommandInfo(
+        "pip._internal.commands.list",
+        "ListCommand",
+        "List installed packages.",
+    ),
+    "show": CommandInfo(
+        "pip._internal.commands.show",
+        "ShowCommand",
+        "Show information about installed packages.",
+    ),
+    "check": CommandInfo(
+        "pip._internal.commands.check",
+        "CheckCommand",
+        "Verify installed packages have compatible dependencies.",
+    ),
+    "config": CommandInfo(
+        "pip._internal.commands.configuration",
+        "ConfigurationCommand",
+        "Manage local and global configuration.",
+    ),
+    "search": CommandInfo(
+        "pip._internal.commands.search",
+        "SearchCommand",
+        "Search PyPI for packages.",
+    ),
+    "cache": CommandInfo(
+        "pip._internal.commands.cache",
+        "CacheCommand",
+        "Inspect and manage pip's wheel cache.",
+    ),
+    "index": CommandInfo(
+        "pip._internal.commands.index",
+        "IndexCommand",
+        "Inspect information available from package indexes.",
+    ),
+    "wheel": CommandInfo(
+        "pip._internal.commands.wheel",
+        "WheelCommand",
+        "Build wheels from your requirements.",
+    ),
+    "hash": CommandInfo(
+        "pip._internal.commands.hash",
+        "HashCommand",
+        "Compute hashes of package archives.",
+    ),
+    "completion": CommandInfo(
+        "pip._internal.commands.completion",
+        "CompletionCommand",
+        "A helper command used for command completion.",
+    ),
+    "debug": CommandInfo(
+        "pip._internal.commands.debug",
+        "DebugCommand",
+        "Show information useful for debugging.",
+    ),
+    "help": CommandInfo(
+        "pip._internal.commands.help",
+        "HelpCommand",
+        "Show help for commands.",
+    ),
+}
+
+
+def create_command(name: str, **kwargs: Any) -> Command:
+    """
+    Create an instance of the Command class with the given name.
+    """
+    module_path, class_name, summary = commands_dict[name]
+    module = importlib.import_module(module_path)
+    command_class = getattr(module, class_name)
+    command = command_class(name=name, summary=summary, **kwargs)
+
+    return command
+
+
+def get_similar_commands(name: str) -> Optional[str]:
+    """Command name auto-correct."""
+    from difflib import get_close_matches
+
+    name = name.lower()
+
+    close_commands = get_close_matches(name, commands_dict.keys())
+
+    if close_commands:
+        return close_commands[0]
+    else:
+        return None
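Note: the registry above defers each command's import until create_command is called, so presenting `--help` never pays for importing every subcommand module. A minimal standalone sketch of the same lazy-import registry pattern (the registry below is illustrative, not pip's real commands_dict):

import importlib
from typing import NamedTuple

class CmdInfo(NamedTuple):
    module_path: str
    class_name: str
    summary: str

# Illustrative registry: the class is only imported when the command runs.
registry = {"encode": CmdInfo("json", "JSONEncoder", "Encode JSON.")}

def create(name: str) -> object:
    info = registry[name]
    module = importlib.import_module(info.module_path)  # deferred import
    return getattr(module, info.class_name)()

get_similar_commands complements this with difflib.get_close_matches over the same keys, which is how a typo like "instal" yields an "install" suggestion.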
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..3e675e2
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/cache.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/cache.cpython-39.pyc
new file mode 100644
index 0000000..fbf43fb
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/cache.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/check.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/check.cpython-39.pyc
new file mode 100644
index 0000000..4642cda
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/check.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/completion.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/completion.cpython-39.pyc
new file mode 100644
index 0000000..f4d77df
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/completion.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-39.pyc
new file mode 100644
index 0000000..8bde5e9
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/debug.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/debug.cpython-39.pyc
new file mode 100644
index 0000000..a8c83d8
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/debug.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/download.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/download.cpython-39.pyc
new file mode 100644
index 0000000..9e81759
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/download.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-39.pyc
new file mode 100644
index 0000000..1d3f18e
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/hash.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/hash.cpython-39.pyc
new file mode 100644
index 0000000..5a2d21e
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/hash.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/help.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/help.cpython-39.pyc
new file mode 100644
index 0000000..f2ef195
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/help.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/index.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/index.cpython-39.pyc
new file mode 100644
index 0000000..730f1ca
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/index.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/inspect.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/inspect.cpython-39.pyc
new file mode 100644
index 0000000..78c889d
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/inspect.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/install.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/install.cpython-39.pyc
new file mode 100644
index 0000000..8698167
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/install.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/list.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/list.cpython-39.pyc
new file mode 100644
index 0000000..f889cab
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/list.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/search.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/search.cpython-39.pyc
new file mode 100644
index 0000000..26bf6d5
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/search.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/show.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/show.cpython-39.pyc
new file mode 100644
index 0000000..101d1e4
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/show.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-39.pyc
new file mode 100644
index 0000000..453230f
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-39.pyc
new file mode 100644
index 0000000..088b40a
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/cache.py b/venv/lib/python3.9/site-packages/pip/_internal/commands/cache.py
new file mode 100644
index 0000000..c5f0330
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/cache.py
@@ -0,0 +1,223 @@
+import os
+import textwrap
+from optparse import Values
+from typing import Any, List
+
+import pip._internal.utils.filesystem as filesystem
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.exceptions import CommandError, PipError
+from pip._internal.utils.logging import getLogger
+
+logger = getLogger(__name__)
+
+
+class CacheCommand(Command):
+    """
+    Inspect and manage pip's wheel cache.
+
+    Subcommands:
+
+    - dir: Show the cache directory.
+    - info: Show information about the cache.
+    - list: List filenames of packages stored in the cache.
+    - remove: Remove one or more packages from the cache.
+    - purge: Remove all items from the cache.
+
+    ``<pattern>`` can be a glob expression or a package name.
+    """
+
+    ignore_require_venv = True
+    usage = """
+        %prog dir
+        %prog info
+        %prog list [<pattern>] [--format=[human, abspath]]
+        %prog remove <pattern>
+        %prog purge
+    """
+
+    def add_options(self) -> None:
+
+        self.cmd_opts.add_option(
+            "--format",
+            action="store",
+            dest="list_format",
+            default="human",
+            choices=("human", "abspath"),
+            help="Select the output format among: human (default) or abspath",
+        )
+
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options: Values, args: List[str]) -> int:
+        handlers = {
+            "dir": self.get_cache_dir,
+            "info": self.get_cache_info,
+            "list": self.list_cache_items,
+            "remove": self.remove_cache_items,
+            "purge": self.purge_cache,
+        }
+
+        if not options.cache_dir:
+            logger.error("pip cache commands can not function since cache is disabled.")
+            return ERROR
+
+        # Determine action
+        if not args or args[0] not in handlers:
+            logger.error(
+                "Need an action (%s) to perform.",
+                ", ".join(sorted(handlers)),
+            )
+            return ERROR
+
+        action = args[0]
+
+        # Error handling happens here, not in the action-handlers.
+        try:
+            handlers[action](options, args[1:])
+        except PipError as e:
+            logger.error(e.args[0])
+            return ERROR
+
+        return SUCCESS
+
+    def get_cache_dir(self, options: Values, args: List[Any]) -> None:
+        if args:
+            raise CommandError("Too many arguments")
+
+        logger.info(options.cache_dir)
+
+    def get_cache_info(self, options: Values, args: List[Any]) -> None:
+        if args:
+            raise CommandError("Too many arguments")
+
+        num_http_files = len(self._find_http_files(options))
+        num_packages = len(self._find_wheels(options, "*"))
+
+        http_cache_location = self._cache_dir(options, "http")
+        wheels_cache_location = self._cache_dir(options, "wheels")
+        http_cache_size = filesystem.format_directory_size(http_cache_location)
+        wheels_cache_size = filesystem.format_directory_size(wheels_cache_location)
+
+        message = (
+            textwrap.dedent(
+                """
+                    Package index page cache location: {http_cache_location}
+                    Package index page cache size: {http_cache_size}
+                    Number of HTTP files: {num_http_files}
+                    Locally built wheels location: {wheels_cache_location}
+                    Locally built wheels size: {wheels_cache_size}
+                    Number of locally built wheels: {package_count}
+                """
+            )
+            .format(
+                http_cache_location=http_cache_location,
+                http_cache_size=http_cache_size,
+                num_http_files=num_http_files,
+                wheels_cache_location=wheels_cache_location,
+                package_count=num_packages,
+                wheels_cache_size=wheels_cache_size,
+            )
+            .strip()
+        )
+
+        logger.info(message)
+
+    def list_cache_items(self, options: Values, args: List[Any]) -> None:
+        if len(args) > 1:
+            raise CommandError("Too many arguments")
+
+        if args:
+            pattern = args[0]
+        else:
+            pattern = "*"
+
+        files = self._find_wheels(options, pattern)
+        if options.list_format == "human":
+            self.format_for_human(files)
+        else:
+            self.format_for_abspath(files)
+
+    def format_for_human(self, files: List[str]) -> None:
+        if not files:
+            logger.info("No locally built wheels cached.")
+            return
+
+        results = []
+        for filename in files:
+            wheel = os.path.basename(filename)
+            size = filesystem.format_file_size(filename)
+            results.append(f" - {wheel} ({size})")
+        logger.info("Cache contents:\n")
+        logger.info("\n".join(sorted(results)))
+
+    def format_for_abspath(self, files: List[str]) -> None:
+        if not files:
+            return
+
+        results = []
+        for filename in files:
+            results.append(filename)
+
+        logger.info("\n".join(sorted(results)))
+
+    def remove_cache_items(self, options: Values, args: List[Any]) -> None:
+        if len(args) > 1:
+            raise CommandError("Too many arguments")
+
+        if not args:
+            raise CommandError("Please provide a pattern")
+
+        files = self._find_wheels(options, args[0])
+
+        no_matching_msg = "No matching packages"
+        if args[0] == "*":
+            # Only fetch http files if no specific pattern given
+            files += self._find_http_files(options)
+        else:
+            # Add the pattern to the log message
+            no_matching_msg += ' for pattern "{}"'.format(args[0])
+
+        if not files:
+            logger.warning(no_matching_msg)
+
+        for filename in files:
+            os.unlink(filename)
+            logger.verbose("Removed %s", filename)
+        logger.info("Files removed: %s", len(files))
+
+    def purge_cache(self, options: Values, args: List[Any]) -> None:
+        if args:
+            raise CommandError("Too many arguments")
+
+        return self.remove_cache_items(options, ["*"])
+
+    def _cache_dir(self, options: Values, subdir: str) -> str:
+        return os.path.join(options.cache_dir, subdir)
+
+    def _find_http_files(self, options: Values) -> List[str]:
+        http_dir = self._cache_dir(options, "http")
+        return filesystem.find_files(http_dir, "*")
+
+    def _find_wheels(self, options: Values, pattern: str) -> List[str]:
+        wheel_dir = self._cache_dir(options, "wheels")
+
+        # The wheel filename format, as specified in PEP 427, is:
+        #     {distribution}-{version}(-{build})?-{python}-{abi}-{platform}.whl
+        #
+        # Additionally, non-alphanumeric values in the distribution are
+        # normalized to underscores (_), meaning hyphens can never occur
+        # before `-{version}`.
+        #
+        # Given that information:
+        # - If the pattern we're given contains a hyphen (-), the user is
+        #   providing at least the version. Thus, we can just append `*.whl`
+        #   to match the rest of it.
+        # - If the pattern we're given doesn't contain a hyphen (-), the
+        #   user is only providing the name. Thus, we append `-*.whl` to
+        #   match the hyphen before the version, followed by anything else.
+        #
+        # PEP 427: https://www.python.org/dev/peps/pep-0427/
+        pattern = pattern + ("*.whl" if "-" in pattern else "-*.whl")
+
+        return filesystem.find_files(wheel_dir, pattern)
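The hyphen heuristic in _find_wheels is easy to check in isolation. A short sketch, assuming fnmatch-style globbing (which is what filesystem.find_files ultimately applies to each filename):

from fnmatch import fnmatch

def expand(pattern: str) -> str:
    # A hyphen means the user already supplied at least part of the version.
    return pattern + ("*.whl" if "-" in pattern else "-*.whl")

assert expand("requests") == "requests-*.whl"
assert expand("requests-2.28") == "requests-2.28*.whl"
assert fnmatch("requests-2.28.1-py3-none-any.whl", expand("requests"))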
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/check.py b/venv/lib/python3.9/site-packages/pip/_internal/commands/check.py
new file mode 100644
index 0000000..3864220
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/check.py
@@ -0,0 +1,53 @@
+import logging
+from optparse import Values
+from typing import List
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.operations.check import (
+    check_package_set,
+    create_package_set_from_installed,
+)
+from pip._internal.utils.misc import write_output
+
+logger = logging.getLogger(__name__)
+
+
+class CheckCommand(Command):
+    """Verify installed packages have compatible dependencies."""
+
+    usage = """
+      %prog [options]"""
+
+    def run(self, options: Values, args: List[str]) -> int:
+
+        package_set, parsing_probs = create_package_set_from_installed()
+        missing, conflicting = check_package_set(package_set)
+
+        for project_name in missing:
+            version = package_set[project_name].version
+            for dependency in missing[project_name]:
+                write_output(
+                    "%s %s requires %s, which is not installed.",
+                    project_name,
+                    version,
+                    dependency[0],
+                )
+
+        for project_name in conflicting:
+            version = package_set[project_name].version
+            for dep_name, dep_version, req in conflicting[project_name]:
+                write_output(
+                    "%s %s has requirement %s, but you have %s %s.",
+                    project_name,
+                    version,
+                    req,
+                    dep_name,
+                    dep_version,
+                )
+
+        if missing or conflicting or parsing_probs:
+            return ERROR
+        else:
+            write_output("No broken requirements found.")
+            return SUCCESS
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/completion.py b/venv/lib/python3.9/site-packages/pip/_internal/commands/completion.py
new file mode 100644
index 0000000..deaa308
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/completion.py
@@ -0,0 +1,126 @@
+import sys
+import textwrap
+from optparse import Values
+from typing import List
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.utils.misc import get_prog
+
+BASE_COMPLETION = """
+# pip {shell} completion start{script}# pip {shell} completion end
+"""
+
+COMPLETION_SCRIPTS = {
+    "bash": """
+        _pip_completion()
+        {{
+            COMPREPLY=( $( COMP_WORDS="${{COMP_WORDS[*]}}" \\
+                           COMP_CWORD=$COMP_CWORD \\
+                           PIP_AUTO_COMPLETE=1 $1 2>/dev/null ) )
+        }}
+        complete -o default -F _pip_completion {prog}
+    """,
+    "zsh": """
+        function _pip_completion {{
+          local words cword
+          read -Ac words
+          read -cn cword
+          reply=( $( COMP_WORDS="$words[*]" \\
+                     COMP_CWORD=$(( cword-1 )) \\
+                     PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null ))
+        }}
+        compctl -K _pip_completion {prog}
+    """,
+    "fish": """
+        function __fish_complete_pip
+            set -lx COMP_WORDS (commandline -o) ""
+            set -lx COMP_CWORD ( \\
+                math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\
+            )
+            set -lx PIP_AUTO_COMPLETE 1
+            string split \\  -- (eval $COMP_WORDS[1])
+        end
+        complete -fa "(__fish_complete_pip)" -c {prog}
+    """,
+    "powershell": """
+        if ((Test-Path Function:\\TabExpansion) -and -not `
+            (Test-Path Function:\\_pip_completeBackup)) {{
+            Rename-Item Function:\\TabExpansion _pip_completeBackup
+        }}
+        function TabExpansion($line, $lastWord) {{
+            $lastBlock = [regex]::Split($line, '[|;]')[-1].TrimStart()
+            if ($lastBlock.StartsWith("{prog} ")) {{
+                $Env:COMP_WORDS=$lastBlock
+                $Env:COMP_CWORD=$lastBlock.Split().Length - 1
+                $Env:PIP_AUTO_COMPLETE=1
+                (& {prog}).Split()
+                Remove-Item Env:COMP_WORDS
+                Remove-Item Env:COMP_CWORD
+                Remove-Item Env:PIP_AUTO_COMPLETE
+            }}
+            elseif (Test-Path Function:\\_pip_completeBackup) {{
+                # Fall back on existing tab expansion
+                _pip_completeBackup $line $lastWord
+            }}
+        }}
+    """,
+}
+
+
+class CompletionCommand(Command):
+    """A helper command to be used for command completion."""
+
+    ignore_require_venv = True
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "--bash",
+            "-b",
+            action="store_const",
+            const="bash",
+            dest="shell",
+            help="Emit completion code for bash",
+        )
+        self.cmd_opts.add_option(
+            "--zsh",
+            "-z",
+            action="store_const",
+            const="zsh",
+            dest="shell",
+            help="Emit completion code for zsh",
+        )
+        self.cmd_opts.add_option(
+            "--fish",
+            "-f",
+            action="store_const",
+            const="fish",
+            dest="shell",
+            help="Emit completion code for fish",
+        )
+        self.cmd_opts.add_option(
+            "--powershell",
+            "-p",
+            action="store_const",
+            const="powershell",
+            dest="shell",
+            help="Emit completion code for powershell",
+        )
+
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options: Values, args: List[str]) -> int:
+        """Prints the completion code of the given shell"""
+        shells = COMPLETION_SCRIPTS.keys()
+        shell_options = ["--" + shell for shell in sorted(shells)]
+        if options.shell in shells:
+            script = textwrap.dedent(
+                COMPLETION_SCRIPTS.get(options.shell, "").format(prog=get_prog())
+            )
+            print(BASE_COMPLETION.format(script=script, shell=options.shell))
+            return SUCCESS
+        else:
+            sys.stderr.write(
+                "ERROR: You must pass {}\n".format(" or ".join(shell_options))
+            )
+            return SUCCESS
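One subtlety in COMPLETION_SCRIPTS above is brace escaping: the shell snippets contain literal { and } characters, so those are doubled ({{/}}) and only {prog} is substituted by str.format. A tiny illustration with a made-up template:

# Doubled braces survive formatting as literal shell braces.
template = "_f() {{ echo done; }}; complete -F _f {prog}"
print(template.format(prog="pip"))  # _f() { echo done; }; complete -F _f pip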
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/configuration.py b/venv/lib/python3.9/site-packages/pip/_internal/commands/configuration.py
new file mode 100644
index 0000000..84b134e
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/configuration.py
@@ -0,0 +1,282 @@
+import logging
+import os
+import subprocess
+from optparse import Values
+from typing import Any, List, Optional
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.configuration import (
+    Configuration,
+    Kind,
+    get_configuration_files,
+    kinds,
+)
+from pip._internal.exceptions import PipError
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import get_prog, write_output
+
+logger = logging.getLogger(__name__)
+
+
+class ConfigurationCommand(Command):
+    """
+    Manage local and global configuration.
+
+    Subcommands:
+
+    - list: List the active configuration (or from the file specified)
+    - edit: Edit the configuration file in an editor
+    - get: Get the value associated with command.option
+    - set: Set the command.option=value
+    - unset: Unset the value associated with command.option
+    - debug: List the configuration files and values defined under them
+
+    Configuration keys should be dot separated command and option name,
+    with the special prefix "global" affecting any command. For example,
+    "pip config set global.index-url https://example.org/" would configure
+    the index url for all commands, but "pip config set download.timeout 10"
+    would configure a 10 second timeout only for "pip download" commands.
+
+    If none of --user, --global and --site are passed, a virtual
+    environment configuration file is used if one is active and the file
+    exists. Otherwise, all modifications happen to the user file by
+    default.
+    """
+
+    ignore_require_venv = True
+    usage = """
+        %prog [<file-option>] list
+        %prog [<file-option>] [--editor <editor-path>] edit
+
+        %prog [<file-option>] get command.option
+        %prog [<file-option>] set command.option value
+        %prog [<file-option>] unset command.option
+        %prog [<file-option>] debug
+    """
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "--editor",
+            dest="editor",
+            action="store",
+            default=None,
+            help=(
+                "Editor to use to edit the file. Uses VISUAL or EDITOR "
+                "environment variables if not provided."
+            ),
+        )
+
+        self.cmd_opts.add_option(
+            "--global",
+            dest="global_file",
+            action="store_true",
+            default=False,
+            help="Use the system-wide configuration file only",
+        )
+
+        self.cmd_opts.add_option(
+            "--user",
+            dest="user_file",
+            action="store_true",
+            default=False,
+            help="Use the user configuration file only",
+        )
+
+        self.cmd_opts.add_option(
+            "--site",
+            dest="site_file",
+            action="store_true",
+            default=False,
+            help="Use the current environment configuration file only",
+        )
+
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options: Values, args: List[str]) -> int:
+        handlers = {
+            "list": self.list_values,
+            "edit": self.open_in_editor,
+            "get": self.get_name,
+            "set": self.set_name_value,
+            "unset": self.unset_name,
+            "debug": self.list_config_values,
+        }
+
+        # Determine action
+        if not args or args[0] not in handlers:
+            logger.error(
+                "Need an action (%s) to perform.",
+                ", ".join(sorted(handlers)),
+            )
+            return ERROR
+
+        action = args[0]
+
+        # Determine which configuration files are to be loaded
+        #    Depends on whether the command is modifying.
+        try:
+            load_only = self._determine_file(
+                options, need_value=(action in ["get", "set", "unset", "edit"])
+            )
+        except PipError as e:
+            logger.error(e.args[0])
+            return ERROR
+
+        # Load a new configuration
+        self.configuration = Configuration(
+            isolated=options.isolated_mode, load_only=load_only
+        )
+        self.configuration.load()
+
+        # Error handling happens here, not in the action-handlers.
+        try:
+            handlers[action](options, args[1:])
+        except PipError as e:
+            logger.error(e.args[0])
+            return ERROR
+
+        return SUCCESS
+
+    def _determine_file(self, options: Values, need_value: bool) -> Optional[Kind]:
+        file_options = [
+            key
+            for key, value in (
+                (kinds.USER, options.user_file),
+                (kinds.GLOBAL, options.global_file),
+                (kinds.SITE, options.site_file),
+            )
+            if value
+        ]
+
+        if not file_options:
+            if not need_value:
+                return None
+            # Default to user, unless there's a site file.
+            elif any(
+                os.path.exists(site_config_file)
+                for site_config_file in get_configuration_files()[kinds.SITE]
+            ):
+                return kinds.SITE
+            else:
+                return kinds.USER
+        elif len(file_options) == 1:
+            return file_options[0]
+
+        raise PipError(
+            "Need exactly one file to operate upon "
+            "(--user, --site, --global) to perform."
+        )
+
+    def list_values(self, options: Values, args: List[str]) -> None:
+        self._get_n_args(args, "list", n=0)
+
+        for key, value in sorted(self.configuration.items()):
+            write_output("%s=%r", key, value)
+
+    def get_name(self, options: Values, args: List[str]) -> None:
+        key = self._get_n_args(args, "get [name]", n=1)
+        value = self.configuration.get_value(key)
+
+        write_output("%s", value)
+
+    def set_name_value(self, options: Values, args: List[str]) -> None:
+        key, value = self._get_n_args(args, "set [name] [value]", n=2)
+        self.configuration.set_value(key, value)
+
+        self._save_configuration()
+
+    def unset_name(self, options: Values, args: List[str]) -> None:
+        key = self._get_n_args(args, "unset [name]", n=1)
+        self.configuration.unset_value(key)
+
+        self._save_configuration()
+
+    def list_config_values(self, options: Values, args: List[str]) -> None:
+        """List config key-value pairs across different config files"""
+        self._get_n_args(args, "debug", n=0)
+
+        self.print_env_var_values()
+        # Iterate over config files and print if they exist, and the
+        # key-value pairs present in them if they do
+        for variant, files in sorted(self.configuration.iter_config_files()):
+            write_output("%s:", variant)
+            for fname in files:
+                with indent_log():
+                    file_exists = os.path.exists(fname)
+                    write_output("%s, exists: %r", fname, file_exists)
+                    if file_exists:
+                        self.print_config_file_values(variant)
+
+    def print_config_file_values(self, variant: Kind) -> None:
+        """Get key-value pairs from the file of a variant"""
+        for name, value in self.configuration.get_values_in_config(variant).items():
+            with indent_log():
+                write_output("%s: %s", name, value)
+
+    def print_env_var_values(self) -> None:
+        """Get key-values pairs present as environment variables"""
+        write_output("%s:", "env_var")
+        with indent_log():
+            for key, value in sorted(self.configuration.get_environ_vars()):
+                env_var = f"PIP_{key.upper()}"
+                write_output("%s=%r", env_var, value)
+
+    def open_in_editor(self, options: Values, args: List[str]) -> None:
+        editor = self._determine_editor(options)
+
+        fname = self.configuration.get_file_to_edit()
+        if fname is None:
+            raise PipError("Could not determine appropriate file.")
+        elif '"' in fname:
+            # This shouldn't happen, unless we see a username like that.
+            # If that happens, we'd appreciate a pull request fixing this.
+            raise PipError(
+                f'Can not open an editor for a file name containing "\n{fname}'
+            )
+
+        try:
+            subprocess.check_call(f'{editor} "{fname}"', shell=True)
+        except FileNotFoundError as e:
+            if not e.filename:
+                e.filename = editor
+            raise
+        except subprocess.CalledProcessError as e:
+            raise PipError(
+                "Editor Subprocess exited with exit code {}".format(e.returncode)
+            )
+
+    def _get_n_args(self, args: List[str], example: str, n: int) -> Any:
+        """Helper to make sure the command got the right number of arguments"""
+        if len(args) != n:
+            msg = (
+                "Got unexpected number of arguments, expected {}. "
+                '(example: "{} config {}")'
+            ).format(n, get_prog(), example)
+            raise PipError(msg)
+
+        if n == 1:
+            return args[0]
+        else:
+            return args
+
+    def _save_configuration(self) -> None:
+        # We successfully ran a modifying command. Need to save the
+        # configuration.
+        try:
+            self.configuration.save()
+        except Exception:
+            logger.exception(
+                "Unable to save configuration. Please report this as a bug."
+            )
+            raise PipError("Internal Error.")
+
+    def _determine_editor(self, options: Values) -> str:
+        if options.editor is not None:
+            return options.editor
+        elif "VISUAL" in os.environ:
+            return os.environ["VISUAL"]
+        elif "EDITOR" in os.environ:
+            return os.environ["EDITOR"]
+        else:
+            raise PipError("Could not determine editor to use.")
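The file-selection rules in _determine_file are the heart of this command: explicit flags win, exactly one may be given, and a modifying action with no flag prefers an existing site file over the user file. A hedged standalone sketch of that precedence (plain strings stand in for pip's kinds constants):

import os
from typing import List, Optional

def determine_file(user: bool, global_: bool, site: bool,
                   need_value: bool, site_files: List[str]) -> Optional[str]:
    chosen = [k for k, v in (("user", user), ("global", global_), ("site", site)) if v]
    if not chosen:
        if not need_value:
            return None  # non-modifying actions may load every file
        if any(os.path.exists(f) for f in site_files):
            return "site"
        return "user"
    if len(chosen) == 1:
        return chosen[0]
    raise ValueError("pass exactly one of --user, --global, --site")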
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/debug.py b/venv/lib/python3.9/site-packages/pip/_internal/commands/debug.py
new file mode 100644
index 0000000..6fad1fe
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/debug.py
@@ -0,0 +1,199 @@
+import importlib.resources
+import locale
+import logging
+import os
+import sys
+from optparse import Values
+from types import ModuleType
+from typing import Any, Dict, List, Optional
+
+import pip._vendor
+from pip._vendor.certifi import where
+from pip._vendor.packaging.version import parse as parse_version
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.cmdoptions import make_target_python
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.configuration import Configuration
+from pip._internal.metadata import get_environment
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import get_pip_version
+
+logger = logging.getLogger(__name__)
+
+
+def show_value(name: str, value: Any) -> None:
+    logger.info("%s: %s", name, value)
+
+
+def show_sys_implementation() -> None:
+    logger.info("sys.implementation:")
+    implementation_name = sys.implementation.name
+    with indent_log():
+        show_value("name", implementation_name)
+
+
+def create_vendor_txt_map() -> Dict[str, str]:
+    with importlib.resources.open_text("pip._vendor", "vendor.txt") as f:
+        # Purge non version specifying lines.
+        # Also, remove any space prefix or suffixes (including comments).
+        lines = [
+            line.strip().split(" ", 1)[0] for line in f.readlines() if "==" in line
+        ]
+
+    # Transform into "module" -> version dict.
+    return dict(line.split("==", 1) for line in lines)
+
+
+def get_module_from_module_name(module_name: str) -> ModuleType:
+    # Module name can be uppercase in vendor.txt for some reason...
+    module_name = module_name.lower()
+    # PATCH: setuptools is actually only pkg_resources.
+    if module_name == "setuptools":
+        module_name = "pkg_resources"
+
+    __import__(f"pip._vendor.{module_name}", globals(), locals(), level=0)
+    return getattr(pip._vendor, module_name)
+
+
+def get_vendor_version_from_module(module_name: str) -> Optional[str]:
+    module = get_module_from_module_name(module_name)
+    version = getattr(module, "__version__", None)
+
+    if not version:
+        # Try to find version in debundled module info.
+        assert module.__file__ is not None
+        env = get_environment([os.path.dirname(module.__file__)])
+        dist = env.get_distribution(module_name)
+        if dist:
+            version = str(dist.version)
+
+    return version
+
+
+def show_actual_vendor_versions(vendor_txt_versions: Dict[str, str]) -> None:
+    """Log the actual version and print extra info if there is
+    a conflict or if the actual version could not be imported.
+    """
+    for module_name, expected_version in vendor_txt_versions.items():
+        extra_message = ""
+        actual_version = get_vendor_version_from_module(module_name)
+        if not actual_version:
+            extra_message = (
+                " (Unable to locate actual module version, using"
+                " vendor.txt specified version)"
+            )
+            actual_version = expected_version
+        elif parse_version(actual_version) != parse_version(expected_version):
+            extra_message = (
+                " (CONFLICT: vendor.txt suggests version should"
+                " be {})".format(expected_version)
+            )
+        logger.info("%s==%s%s", module_name, actual_version, extra_message)
+
+
+def show_vendor_versions() -> None:
+    logger.info("vendored library versions:")
+
+    vendor_txt_versions = create_vendor_txt_map()
+    with indent_log():
+        show_actual_vendor_versions(vendor_txt_versions)
+
+
+def show_tags(options: Values) -> None:
+    tag_limit = 10
+
+    target_python = make_target_python(options)
+    tags = target_python.get_tags()
+
+    # Display the target options that were explicitly provided.
+    formatted_target = target_python.format_given()
+    suffix = ""
+    if formatted_target:
+        suffix = f" (target: {formatted_target})"
+
+    msg = "Compatible tags: {}{}".format(len(tags), suffix)
+    logger.info(msg)
+
+    if options.verbose < 1 and len(tags) > tag_limit:
+        tags_limited = True
+        tags = tags[:tag_limit]
+    else:
+        tags_limited = False
+
+    with indent_log():
+        for tag in tags:
+            logger.info(str(tag))
+
+        if tags_limited:
+            msg = (
+                "...\n[First {tag_limit} tags shown. Pass --verbose to show all.]"
+            ).format(tag_limit=tag_limit)
+            logger.info(msg)
+
+
+def ca_bundle_info(config: Configuration) -> str:
+    levels = set()
+    for key, _ in config.items():
+        levels.add(key.split(".")[0])
+
+    if not levels:
+        return "Not specified"
+
+    levels_that_override_global = ["install", "wheel", "download"]
+    global_overriding_level = [
+        level for level in levels if level in levels_that_override_global
+    ]
+    if not global_overriding_level:
+        return "global"
+
+    if "global" in levels:
+        levels.remove("global")
+    return ", ".join(levels)
+
+
+class DebugCommand(Command):
+    """
+    Display debug information.
+    """
+
+    usage = """
+      %prog <options>"""
+    ignore_require_venv = True
+
+    def add_options(self) -> None:
+        cmdoptions.add_target_python_options(self.cmd_opts)
+        self.parser.insert_option_group(0, self.cmd_opts)
+        self.parser.config.load()
+
+    def run(self, options: Values, args: List[str]) -> int:
+        logger.warning(
+            "This command is only meant for debugging. "
+            "Do not use this with automation for parsing and getting these "
+            "details, since the output and options of this command may "
+            "change without notice."
+        )
+        show_value("pip version", get_pip_version())
+        show_value("sys.version", sys.version)
+        show_value("sys.executable", sys.executable)
+        show_value("sys.getdefaultencoding", sys.getdefaultencoding())
+        show_value("sys.getfilesystemencoding", sys.getfilesystemencoding())
+        show_value(
+            "locale.getpreferredencoding",
+            locale.getpreferredencoding(),
+        )
+        show_value("sys.platform", sys.platform)
+        show_sys_implementation()
+
+        show_value("'cert' config value", ca_bundle_info(self.parser.config))
+        show_value("REQUESTS_CA_BUNDLE", os.environ.get("REQUESTS_CA_BUNDLE"))
+        show_value("CURL_CA_BUNDLE", os.environ.get("CURL_CA_BUNDLE"))
+        show_value("pip._vendor.certifi.where()", where())
+        show_value("pip._vendor.DEBUNDLED", pip._vendor.DEBUNDLED)
+
+        show_vendor_versions()
+
+        show_tags(options)
+
+        return SUCCESS
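create_vendor_txt_map above reduces vendor.txt to a name-to-version dict by keeping only pinned "name==version" lines and discarding trailing comments. The same two expressions, run on illustrative sample text rather than pip's real vendor.txt:

sample = """\
certifi==2022.9.24
# unpinned and comment lines are skipped
urllib3==1.26.12  # via requests
"""
lines = [ln.strip().split(" ", 1)[0] for ln in sample.splitlines() if "==" in ln]
versions = dict(line.split("==", 1) for line in lines)
assert versions == {"certifi": "2022.9.24", "urllib3": "1.26.12"}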
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/download.py b/venv/lib/python3.9/site-packages/pip/_internal/commands/download.py
new file mode 100644
index 0000000..4132e08
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/download.py
@@ -0,0 +1,149 @@
+import logging
+import os
+from optparse import Values
+from typing import List
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.cmdoptions import make_target_python
+from pip._internal.cli.req_command import RequirementCommand, with_cleanup
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.operations.build.build_tracker import get_build_tracker
+from pip._internal.req.req_install import (
+    LegacySetupPyOptionsCheckMode,
+    check_legacy_setup_py_options,
+)
+from pip._internal.utils.misc import ensure_dir, normalize_path, write_output
+from pip._internal.utils.temp_dir import TempDirectory
+
+logger = logging.getLogger(__name__)
+
+
+class DownloadCommand(RequirementCommand):
+    """
+    Download packages from:
+
+    - PyPI (and other indexes) using requirement specifiers.
+    - VCS project urls.
+    - Local project directories.
+    - Local or remote source archives.
+
+    pip also supports downloading from "requirements files", which provide
+    an easy way to specify a whole environment to be downloaded.
+    """
+
+    usage = """
+      %prog [options] <requirement specifier> [package-index-options] ...
+      %prog [options] -r <requirements file> [package-index-options] ...
+      %prog [options] <vcs project url> ...
+      %prog [options] <local project path> ...
+      %prog [options] <archive url/path> ..."""
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(cmdoptions.constraints())
+        self.cmd_opts.add_option(cmdoptions.requirements())
+        self.cmd_opts.add_option(cmdoptions.no_deps())
+        self.cmd_opts.add_option(cmdoptions.global_options())
+        self.cmd_opts.add_option(cmdoptions.no_binary())
+        self.cmd_opts.add_option(cmdoptions.only_binary())
+        self.cmd_opts.add_option(cmdoptions.prefer_binary())
+        self.cmd_opts.add_option(cmdoptions.src())
+        self.cmd_opts.add_option(cmdoptions.pre())
+        self.cmd_opts.add_option(cmdoptions.require_hashes())
+        self.cmd_opts.add_option(cmdoptions.progress_bar())
+        self.cmd_opts.add_option(cmdoptions.no_build_isolation())
+        self.cmd_opts.add_option(cmdoptions.use_pep517())
+        self.cmd_opts.add_option(cmdoptions.no_use_pep517())
+        self.cmd_opts.add_option(cmdoptions.check_build_deps())
+        self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
+
+        self.cmd_opts.add_option(
+            "-d",
+            "--dest",
+            "--destination-dir",
+            "--destination-directory",
+            dest="download_dir",
+            metavar="dir",
+            default=os.curdir,
+            help="Download packages into <dir>.",
+        )
+
+        cmdoptions.add_target_python_options(self.cmd_opts)
+
+        index_opts = cmdoptions.make_option_group(
+            cmdoptions.index_group,
+            self.parser,
+        )
+
+        self.parser.insert_option_group(0, index_opts)
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    @with_cleanup
+    def run(self, options: Values, args: List[str]) -> int:
+
+        options.ignore_installed = True
+        # editable doesn't really make sense for `pip download`, but the bowels
+        # of the RequirementSet code require that property.
+        options.editables = []
+
+        cmdoptions.check_dist_restriction(options)
+
+        options.download_dir = normalize_path(options.download_dir)
+        ensure_dir(options.download_dir)
+
+        session = self.get_default_session(options)
+
+        target_python = make_target_python(options)
+        finder = self._build_package_finder(
+            options=options,
+            session=session,
+            target_python=target_python,
+            ignore_requires_python=options.ignore_requires_python,
+        )
+
+        build_tracker = self.enter_context(get_build_tracker())
+
+        directory = TempDirectory(
+            delete=not options.no_clean,
+            kind="download",
+            globally_managed=True,
+        )
+
+        reqs = self.get_requirements(args, options, finder, session)
+        check_legacy_setup_py_options(
+            options, reqs, LegacySetupPyOptionsCheckMode.DOWNLOAD
+        )
+
+        preparer = self.make_requirement_preparer(
+            temp_build_dir=directory,
+            options=options,
+            build_tracker=build_tracker,
+            session=session,
+            finder=finder,
+            download_dir=options.download_dir,
+            use_user_site=False,
+            verbosity=self.verbosity,
+        )
+
+        resolver = self.make_resolver(
+            preparer=preparer,
+            finder=finder,
+            options=options,
+            ignore_requires_python=options.ignore_requires_python,
+            use_pep517=options.use_pep517,
+            py_version_info=options.python_version,
+        )
+
+        self.trace_basic_info(finder)
+
+        requirement_set = resolver.resolve(reqs, check_supported_wheels=True)
+
+        downloaded: List[str] = []
+        for req in requirement_set.requirements.values():
+            if req.satisfied_by is None:
+                assert req.name is not None
+                preparer.save_linked_requirement(req)
+                downloaded.append(req.name)
+        if downloaded:
+            write_output("Successfully downloaded %s", " ".join(downloaded))
+
+        return SUCCESS
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/freeze.py b/venv/lib/python3.9/site-packages/pip/_internal/commands/freeze.py
new file mode 100644
index 0000000..5fa6d39
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/freeze.py
@@ -0,0 +1,97 @@
+import sys
+from optparse import Values
+from typing import List
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.operations.freeze import freeze
+from pip._internal.utils.compat import stdlib_pkgs
+
+DEV_PKGS = {"pip", "setuptools", "distribute", "wheel"}
+
+
+class FreezeCommand(Command):
+    """
+    Output installed packages in requirements format.
+
+    Packages are listed in a case-insensitive sorted order.
+    """
+
+    usage = """
+      %prog [options]"""
+    log_streams = ("ext://sys.stderr", "ext://sys.stderr")
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "-r",
+            "--requirement",
+            dest="requirements",
+            action="append",
+            default=[],
+            metavar="file",
+            help=(
+                "Use the order in the given requirements file and its "
+                "comments when generating output. This option can be "
+                "used multiple times."
+            ),
+        )
+        self.cmd_opts.add_option(
+            "-l",
+            "--local",
+            dest="local",
+            action="store_true",
+            default=False,
+            help=(
+                "If in a virtualenv that has global access, do not output "
+                "globally-installed packages."
+            ),
+        )
+        self.cmd_opts.add_option(
+            "--user",
+            dest="user",
+            action="store_true",
+            default=False,
+            help="Only output packages installed in user-site.",
+        )
+        self.cmd_opts.add_option(cmdoptions.list_path())
+        self.cmd_opts.add_option(
+            "--all",
+            dest="freeze_all",
+            action="store_true",
+            help=(
+                "Do not skip these packages in the output:"
+                " {}".format(", ".join(DEV_PKGS))
+            ),
+        )
+        self.cmd_opts.add_option(
+            "--exclude-editable",
+            dest="exclude_editable",
+            action="store_true",
+            help="Exclude editable package from output.",
+        )
+        self.cmd_opts.add_option(cmdoptions.list_exclude())
+
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options: Values, args: List[str]) -> int:
+        skip = set(stdlib_pkgs)
+        if not options.freeze_all:
+            skip.update(DEV_PKGS)
+
+        if options.excludes:
+            skip.update(options.excludes)
+
+        cmdoptions.check_list_path_option(options)
+
+        for line in freeze(
+            requirement=options.requirements,
+            local_only=options.local,
+            user_only=options.user,
+            paths=options.path,
+            isolated=options.isolated_mode,
+            skip=skip,
+            exclude_editable=options.exclude_editable,
+        ):
+            sys.stdout.write(line + "\n")
+        return SUCCESS
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/hash.py b/venv/lib/python3.9/site-packages/pip/_internal/commands/hash.py
new file mode 100644
index 0000000..042dac8
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/hash.py
@@ -0,0 +1,59 @@
+import hashlib
+import logging
+import sys
+from optparse import Values
+from typing import List
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES
+from pip._internal.utils.misc import read_chunks, write_output
+
+logger = logging.getLogger(__name__)
+
+
+class HashCommand(Command):
+    """
+    Compute a hash of a local package archive.
+
+    These can be used with --hash in a requirements file to do repeatable
+    installs.
+    """
+
+    usage = "%prog [options] <file> ..."
+    ignore_require_venv = True
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "-a",
+            "--algorithm",
+            dest="algorithm",
+            choices=STRONG_HASHES,
+            action="store",
+            default=FAVORITE_HASH,
+            help="The hash algorithm to use: one of {}".format(
+                ", ".join(STRONG_HASHES)
+            ),
+        )
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options: Values, args: List[str]) -> int:
+        if not args:
+            self.parser.print_usage(sys.stderr)
+            return ERROR
+
+        algorithm = options.algorithm
+        for path in args:
+            write_output(
+                "%s:\n--hash=%s:%s", path, algorithm, _hash_of_file(path, algorithm)
+            )
+        return SUCCESS
+
+
+def _hash_of_file(path: str, algorithm: str) -> str:
+    """Return the hash digest of a file."""
+    with open(path, "rb") as archive:
+        hash = hashlib.new(algorithm)
+        for chunk in read_chunks(archive):
+            hash.update(chunk)
+    return hash.hexdigest()
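_hash_of_file streams the archive through the hash in chunks so large files never need to fit in memory. An equivalent self-contained version (the 8 KiB chunk size is an assumption; pip's read_chunks helper picks its own default):

import hashlib

def hash_of_file(path: str, algorithm: str = "sha256") -> str:
    h = hashlib.new(algorithm)
    with open(path, "rb") as f:
        # iter() with a sentinel keeps reading until read() returns b"".
        for block in iter(lambda: f.read(8192), b""):
            h.update(block)
    return h.hexdigest()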
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/help.py b/venv/lib/python3.9/site-packages/pip/_internal/commands/help.py
new file mode 100644
index 0000000..6206631
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/help.py
@@ -0,0 +1,41 @@
+from optparse import Values
+from typing import List
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.exceptions import CommandError
+
+
+class HelpCommand(Command):
+    """Show help for commands"""
+
+    usage = """
+      %prog <command>"""
+    ignore_require_venv = True
+
+    def run(self, options: Values, args: List[str]) -> int:
+        from pip._internal.commands import (
+            commands_dict,
+            create_command,
+            get_similar_commands,
+        )
+
+        try:
+            # 'pip help' with no args is handled by pip.__init__.parseopt()
+            cmd_name = args[0]  # the command we need help for
+        except IndexError:
+            return SUCCESS
+
+        if cmd_name not in commands_dict:
+            guess = get_similar_commands(cmd_name)
+
+            msg = [f'unknown command "{cmd_name}"']
+            if guess:
+                msg.append(f'maybe you meant "{guess}"')
+
+            raise CommandError(" - ".join(msg))
+
+        command = create_command(cmd_name)
+        command.parser.print_help()
+
+        return SUCCESS
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/index.py b/venv/lib/python3.9/site-packages/pip/_internal/commands/index.py
new file mode 100644
index 0000000..b4bf0ac
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/index.py
@@ -0,0 +1,138 @@
+import logging
+from optparse import Values
+from typing import Any, Iterable, List, Optional, Union
+
+from pip._vendor.packaging.version import LegacyVersion, Version
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.req_command import IndexGroupCommand
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.commands.search import print_dist_installation_info
+from pip._internal.exceptions import CommandError, DistributionNotFound, PipError
+from pip._internal.index.collector import LinkCollector
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.models.selection_prefs import SelectionPreferences
+from pip._internal.models.target_python import TargetPython
+from pip._internal.network.session import PipSession
+from pip._internal.utils.misc import write_output
+
+logger = logging.getLogger(__name__)
+
+
+class IndexCommand(IndexGroupCommand):
+    """
+    Inspect information available from package indexes.
+    """
+
+    usage = """
+        %prog versions <package>
+    """
+
+    def add_options(self) -> None:
+        cmdoptions.add_target_python_options(self.cmd_opts)
+
+        self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
+        self.cmd_opts.add_option(cmdoptions.pre())
+        self.cmd_opts.add_option(cmdoptions.no_binary())
+        self.cmd_opts.add_option(cmdoptions.only_binary())
+
+        index_opts = cmdoptions.make_option_group(
+            cmdoptions.index_group,
+            self.parser,
+        )
+
+        self.parser.insert_option_group(0, index_opts)
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options: Values, args: List[str]) -> int:
+        handlers = {
+            "versions": self.get_available_package_versions,
+        }
+
+        logger.warning(
+            "pip index is currently an experimental command. "
+            "It may be removed/changed in a future release "
+            "without prior warning."
+        )
+
+        # Determine action
+        if not args or args[0] not in handlers:
+            logger.error(
+                "Need an action (%s) to perform.",
+                ", ".join(sorted(handlers)),
+            )
+            return ERROR
+
+        action = args[0]
+
+        # Error handling happens here, not in the action-handlers.
+        try:
+            handlers[action](options, args[1:])
+        except PipError as e:
+            logger.error(e.args[0])
+            return ERROR
+
+        return SUCCESS
+
+    def _build_package_finder(
+        self,
+        options: Values,
+        session: PipSession,
+        target_python: Optional[TargetPython] = None,
+        ignore_requires_python: Optional[bool] = None,
+    ) -> PackageFinder:
+        """
+        Create a package finder appropriate to the index command.
+        """
+        link_collector = LinkCollector.create(session, options=options)
+
+        # Pass allow_yanked=False to ignore yanked versions.
+        selection_prefs = SelectionPreferences(
+            allow_yanked=False,
+            allow_all_prereleases=options.pre,
+            ignore_requires_python=ignore_requires_python,
+        )
+
+        return PackageFinder.create(
+            link_collector=link_collector,
+            selection_prefs=selection_prefs,
+            target_python=target_python,
+        )
+
+    def get_available_package_versions(self, options: Values, args: List[Any]) -> None:
+        if len(args) != 1:
+            raise CommandError("You need to specify exactly one argument")
+
+        target_python = cmdoptions.make_target_python(options)
+        query = args[0]
+
+        with self._build_session(options) as session:
+            finder = self._build_package_finder(
+                options=options,
+                session=session,
+                target_python=target_python,
+                ignore_requires_python=options.ignore_requires_python,
+            )
+
+            versions: Iterable[Union[LegacyVersion, Version]] = (
+                candidate.version for candidate in finder.find_all_candidates(query)
+            )
+
+            if not options.pre:
+                # Remove prereleases
+                versions = (
+                    version for version in versions if not version.is_prerelease
+                )
+            versions = set(versions)
+
+            if not versions:
+                raise DistributionNotFound(
+                    "No matching distribution found for {}".format(query)
+                )
+
+            formatted_versions = [str(ver) for ver in sorted(versions, reverse=True)]
+            latest = formatted_versions[0]
+
+        write_output("{} ({})".format(query, latest))
+        write_output("Available versions: {}".format(", ".join(formatted_versions)))
+        print_dist_installation_info(query, latest)
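+
+# A minimal sketch (not pip code; hypothetical values) of the filtering and
+# sorting performed above, using the vendored packaging version class:
+#
+#   from pip._vendor.packaging.version import Version
+#   versions = {Version("1.0"), Version("2.0rc1"), Version("1.5")}
+#   stable = {v for v in versions if not v.is_prerelease}  # drops 2.0rc1
+#   sorted(stable, reverse=True)  # [<Version('1.5')>, <Version('1.0')>]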
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/inspect.py b/venv/lib/python3.9/site-packages/pip/_internal/commands/inspect.py
new file mode 100644
index 0000000..a4e3599
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/inspect.py
@@ -0,0 +1,97 @@
+import logging
+from optparse import Values
+from typing import Any, Dict, List
+
+from pip._vendor.packaging.markers import default_environment
+from pip._vendor.rich import print_json
+
+from pip import __version__
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.req_command import Command
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.metadata import BaseDistribution, get_environment
+from pip._internal.utils.compat import stdlib_pkgs
+from pip._internal.utils.urls import path_to_url
+
+logger = logging.getLogger(__name__)
+
+
+class InspectCommand(Command):
+    """
+    Inspect the content of a Python environment and produce a report in JSON format.
+    """
+
+    ignore_require_venv = True
+    usage = """
+      %prog [options]"""
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "--local",
+            action="store_true",
+            default=False,
+            help=(
+                "If in a virtualenv that has global access, do not list "
+                "globally-installed packages."
+            ),
+        )
+        self.cmd_opts.add_option(
+            "--user",
+            dest="user",
+            action="store_true",
+            default=False,
+            help="Only output packages installed in user-site.",
+        )
+        self.cmd_opts.add_option(cmdoptions.list_path())
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options: Values, args: List[str]) -> int:
+        logger.warning(
+            "pip inspect is currently an experimental command. "
+            "The output format may change in a future release without prior warning."
+        )
+
+        cmdoptions.check_list_path_option(options)
+        dists = get_environment(options.path).iter_installed_distributions(
+            local_only=options.local,
+            user_only=options.user,
+            skip=set(stdlib_pkgs),
+        )
+        output = {
+            "version": "0",
+            "pip_version": __version__,
+            "installed": [self._dist_to_dict(dist) for dist in dists],
+            "environment": default_environment(),
+            # TODO tags? scheme?
+        }
+        print_json(data=output)
+        return SUCCESS
+
+    def _dist_to_dict(self, dist: BaseDistribution) -> Dict[str, Any]:
+        res: Dict[str, Any] = {
+            "metadata": dist.metadata_dict,
+            "metadata_location": dist.info_location,
+        }
+        # direct_url. Note that we don't have download_info (as in the installation
+        # report) since it is not recorded in installed metadata.
+        direct_url = dist.direct_url
+        if direct_url is not None:
+            res["direct_url"] = direct_url.to_dict()
+        else:
+            # Emulate direct_url for legacy editable installs.
+            editable_project_location = dist.editable_project_location
+            if editable_project_location is not None:
+                res["direct_url"] = {
+                    "url": path_to_url(editable_project_location),
+                    "dir_info": {
+                        "editable": True,
+                    },
+                }
+        # installer
+        installer = dist.installer
+        if installer:
+            res["installer"] = installer
+        # requested
+        if dist.installed_with_dist_info:
+            res["requested"] = dist.requested
+        return res
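+
+# For orientation only: an abridged, hypothetical entry as produced by
+# _dist_to_dict for a legacy editable install (key names follow the code
+# above; paths and values are made up):
+#
+#   {
+#       "metadata": {...},
+#       "metadata_location": "/venv/.../pkg.egg-info",
+#       "direct_url": {"url": "file:///src/pkg",
+#                      "dir_info": {"editable": True}},
+#       "installer": "pip",
+#   }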
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/install.py b/venv/lib/python3.9/site-packages/pip/_internal/commands/install.py
new file mode 100644
index 0000000..e081c27
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/install.py
@@ -0,0 +1,860 @@
+import errno
+import json
+import operator
+import os
+import shutil
+import site
+from optparse import SUPPRESS_HELP, Values
+from typing import Iterable, List, Optional
+
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.rich import print_json
+
+from pip._internal.cache import WheelCache
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.cmdoptions import make_target_python
+from pip._internal.cli.req_command import (
+    RequirementCommand,
+    warn_if_run_as_root,
+    with_cleanup,
+)
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.exceptions import CommandError, InstallationError
+from pip._internal.locations import get_scheme
+from pip._internal.metadata import get_environment
+from pip._internal.models.format_control import FormatControl
+from pip._internal.models.installation_report import InstallationReport
+from pip._internal.operations.build.build_tracker import get_build_tracker
+from pip._internal.operations.check import ConflictDetails, check_install_conflicts
+from pip._internal.req import install_given_reqs
+from pip._internal.req.req_install import (
+    InstallRequirement,
+    LegacySetupPyOptionsCheckMode,
+    check_legacy_setup_py_options,
+)
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.deprecation import (
+    LegacyInstallReasonFailedBdistWheel,
+    deprecated,
+)
+from pip._internal.utils.distutils_args import parse_distutils_args
+from pip._internal.utils.filesystem import test_writable_dir
+from pip._internal.utils.logging import getLogger
+from pip._internal.utils.misc import (
+    ensure_dir,
+    get_pip_version,
+    protect_pip_from_modification_on_windows,
+    write_output,
+)
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.virtualenv import (
+    running_under_virtualenv,
+    virtualenv_no_global,
+)
+from pip._internal.wheel_builder import (
+    BdistWheelAllowedPredicate,
+    build,
+    should_build_for_install_command,
+)
+
+logger = getLogger(__name__)
+
+
+def get_check_bdist_wheel_allowed(
+    format_control: FormatControl,
+) -> BdistWheelAllowedPredicate:
+    def check_binary_allowed(req: InstallRequirement) -> bool:
+        canonical_name = canonicalize_name(req.name or "")
+        allowed_formats = format_control.get_allowed_formats(canonical_name)
+        return "binary" in allowed_formats
+
+    return check_binary_allowed
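+
+# Usage sketch (illustrative, mirroring the call made later in run()): the
+# returned predicate closes over the FormatControl, so the wheel builder can
+# ask per requirement whether a binary (wheel) build is allowed:
+#
+#   check = get_check_bdist_wheel_allowed(finder.format_control)
+#   check(req)  # False when the user passed --no-binary for req's name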
+
+
+class InstallCommand(RequirementCommand):
+    """
+    Install packages from:
+
+    - PyPI (and other indexes) using requirement specifiers.
+    - VCS project urls.
+    - Local project directories.
+    - Local or remote source archives.
+
+    pip also supports installing from "requirements files", which provide
+    an easy way to specify a whole environment to be installed.
+    """
+
+    usage = """
+      %prog [options] <requirement specifier> [package-index-options] ...
+      %prog [options] -r <requirements file> [package-index-options] ...
+      %prog [options] [-e] <vcs project url> ...
+      %prog [options] [-e] <local project path> ...
+      %prog [options] <archive url/path> ..."""
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(cmdoptions.requirements())
+        self.cmd_opts.add_option(cmdoptions.constraints())
+        self.cmd_opts.add_option(cmdoptions.no_deps())
+        self.cmd_opts.add_option(cmdoptions.pre())
+
+        self.cmd_opts.add_option(cmdoptions.editable())
+        self.cmd_opts.add_option(
+            "--dry-run",
+            action="store_true",
+            dest="dry_run",
+            default=False,
+            help=(
+                "Don't actually install anything, just print what would be. "
+                "Can be used in combination with --ignore-installed "
+                "to 'resolve' the requirements."
+            ),
+        )
+        self.cmd_opts.add_option(
+            "-t",
+            "--target",
+            dest="target_dir",
+            metavar="dir",
+            default=None,
+            help=(
+                "Install packages into <dir>. "
+                "By default this will not replace existing files/folders in "
+                "<dir>. Use --upgrade to replace existing packages in <dir> "
+                "with new versions."
+            ),
+        )
+        cmdoptions.add_target_python_options(self.cmd_opts)
+
+        self.cmd_opts.add_option(
+            "--user",
+            dest="use_user_site",
+            action="store_true",
+            help=(
+                "Install to the Python user install directory for your "
+                "platform. Typically ~/.local/, or %APPDATA%\\Python on "
+                "Windows. (See the Python documentation for site.USER_BASE "
+                "for full details.)"
+            ),
+        )
+        self.cmd_opts.add_option(
+            "--no-user",
+            dest="use_user_site",
+            action="store_false",
+            help=SUPPRESS_HELP,
+        )
+        self.cmd_opts.add_option(
+            "--root",
+            dest="root_path",
+            metavar="dir",
+            default=None,
+            help="Install everything relative to this alternate root directory.",
+        )
+        self.cmd_opts.add_option(
+            "--prefix",
+            dest="prefix_path",
+            metavar="dir",
+            default=None,
+            help=(
+                "Installation prefix where lib, bin and other top-level "
+                "folders are placed"
+            ),
+        )
+
+        self.cmd_opts.add_option(cmdoptions.src())
+
+        self.cmd_opts.add_option(
+            "-U",
+            "--upgrade",
+            dest="upgrade",
+            action="store_true",
+            help=(
+                "Upgrade all specified packages to the newest available "
+                "version. The handling of dependencies depends on the "
+                "upgrade-strategy used."
+            ),
+        )
+
+        self.cmd_opts.add_option(
+            "--upgrade-strategy",
+            dest="upgrade_strategy",
+            default="only-if-needed",
+            choices=["only-if-needed", "eager"],
+            help=(
+                "Determines how dependency upgrading should be handled "
+                "[default: %default]. "
+                '"eager" - dependencies are upgraded regardless of '
+                "whether the currently installed version satisfies the "
+                "requirements of the upgraded package(s). "
+                '"only-if-needed" -  are upgraded only when they do not '
+                "satisfy the requirements of the upgraded package(s)."
+            ),
+        )
+
+        self.cmd_opts.add_option(
+            "--force-reinstall",
+            dest="force_reinstall",
+            action="store_true",
+            help="Reinstall all packages even if they are already up-to-date.",
+        )
+
+        self.cmd_opts.add_option(
+            "-I",
+            "--ignore-installed",
+            dest="ignore_installed",
+            action="store_true",
+            help=(
+                "Ignore the installed packages, overwriting them. "
+                "This can break your system if the existing package "
+                "is of a different version or was installed "
+                "with a different package manager!"
+            ),
+        )
+
+        self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
+        self.cmd_opts.add_option(cmdoptions.no_build_isolation())
+        self.cmd_opts.add_option(cmdoptions.use_pep517())
+        self.cmd_opts.add_option(cmdoptions.no_use_pep517())
+        self.cmd_opts.add_option(cmdoptions.check_build_deps())
+
+        self.cmd_opts.add_option(cmdoptions.config_settings())
+        self.cmd_opts.add_option(cmdoptions.install_options())
+        self.cmd_opts.add_option(cmdoptions.global_options())
+
+        self.cmd_opts.add_option(
+            "--compile",
+            action="store_true",
+            dest="compile",
+            default=True,
+            help="Compile Python source files to bytecode",
+        )
+
+        self.cmd_opts.add_option(
+            "--no-compile",
+            action="store_false",
+            dest="compile",
+            help="Do not compile Python source files to bytecode",
+        )
+
+        self.cmd_opts.add_option(
+            "--no-warn-script-location",
+            action="store_false",
+            dest="warn_script_location",
+            default=True,
+            help="Do not warn when installing scripts outside PATH",
+        )
+        self.cmd_opts.add_option(
+            "--no-warn-conflicts",
+            action="store_false",
+            dest="warn_about_conflicts",
+            default=True,
+            help="Do not warn about broken dependencies",
+        )
+        self.cmd_opts.add_option(cmdoptions.no_binary())
+        self.cmd_opts.add_option(cmdoptions.only_binary())
+        self.cmd_opts.add_option(cmdoptions.prefer_binary())
+        self.cmd_opts.add_option(cmdoptions.require_hashes())
+        self.cmd_opts.add_option(cmdoptions.progress_bar())
+        self.cmd_opts.add_option(cmdoptions.root_user_action())
+
+        index_opts = cmdoptions.make_option_group(
+            cmdoptions.index_group,
+            self.parser,
+        )
+
+        self.parser.insert_option_group(0, index_opts)
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+        self.cmd_opts.add_option(
+            "--report",
+            dest="json_report_file",
+            metavar="file",
+            default=None,
+            help=(
+                "Generate a JSON file describing what pip did to install "
+                "the provided requirements. "
+                "Can be used in combination with --dry-run and --ignore-installed "
+                "to 'resolve' the requirements. "
+                "When - is used as file name it writes to stdout. "
+                "When writing to stdout, please combine with the --quiet option "
+                "to avoid mixing pip logging output with JSON output."
+            ),
+        )
+
+    @with_cleanup
+    def run(self, options: Values, args: List[str]) -> int:
+        if options.use_user_site and options.target_dir is not None:
+            raise CommandError("Can not combine '--user' and '--target'")
+
+        upgrade_strategy = "to-satisfy-only"
+        if options.upgrade:
+            upgrade_strategy = options.upgrade_strategy
+
+        cmdoptions.check_dist_restriction(options, check_target=True)
+
+        install_options = options.install_options or []
+
+        logger.verbose("Using %s", get_pip_version())
+        options.use_user_site = decide_user_install(
+            options.use_user_site,
+            prefix_path=options.prefix_path,
+            target_dir=options.target_dir,
+            root_path=options.root_path,
+            isolated_mode=options.isolated_mode,
+        )
+
+        target_temp_dir: Optional[TempDirectory] = None
+        target_temp_dir_path: Optional[str] = None
+        if options.target_dir:
+            options.ignore_installed = True
+            options.target_dir = os.path.abspath(options.target_dir)
+            if (
+                # fmt: off
+                os.path.exists(options.target_dir) and
+                not os.path.isdir(options.target_dir)
+                # fmt: on
+            ):
+                raise CommandError(
+                    "Target path exists but is not a directory, will not continue."
+                )
+
+            # Create a temporary directory to use with the --target option
+            target_temp_dir = TempDirectory(kind="target")
+            target_temp_dir_path = target_temp_dir.path
+            self.enter_context(target_temp_dir)
+
+        global_options = options.global_options or []
+
+        session = self.get_default_session(options)
+
+        target_python = make_target_python(options)
+        finder = self._build_package_finder(
+            options=options,
+            session=session,
+            target_python=target_python,
+            ignore_requires_python=options.ignore_requires_python,
+        )
+        build_tracker = self.enter_context(get_build_tracker())
+
+        directory = TempDirectory(
+            delete=not options.no_clean,
+            kind="install",
+            globally_managed=True,
+        )
+
+        try:
+            reqs = self.get_requirements(args, options, finder, session)
+            check_legacy_setup_py_options(
+                options, reqs, LegacySetupPyOptionsCheckMode.INSTALL
+            )
+
+            if "no-binary-enable-wheel-cache" in options.features_enabled:
+                # TODO: remove format_control from WheelCache when the deprecation cycle
+                # is over
+                wheel_cache = WheelCache(options.cache_dir)
+            else:
+                if options.format_control.no_binary:
+                    deprecated(
+                        reason=(
+                            "--no-binary currently disables reading from "
+                            "the cache of locally built wheels. In the future "
+                            "--no-binary will not influence the wheel cache."
+                        ),
+                        replacement="to use the --no-cache-dir option",
+                        feature_flag="no-binary-enable-wheel-cache",
+                        issue=11453,
+                        gone_in="23.1",
+                    )
+                wheel_cache = WheelCache(options.cache_dir, options.format_control)
+
+            # Only when installing is it permitted to use PEP 660.
+            # In other circumstances (pip wheel, pip download) we generate
+            # regular (i.e. non editable) metadata and wheels.
+            for req in reqs:
+                req.permit_editable_wheels = True
+
+            reject_location_related_install_options(reqs, options.install_options)
+
+            preparer = self.make_requirement_preparer(
+                temp_build_dir=directory,
+                options=options,
+                build_tracker=build_tracker,
+                session=session,
+                finder=finder,
+                use_user_site=options.use_user_site,
+                verbosity=self.verbosity,
+            )
+            resolver = self.make_resolver(
+                preparer=preparer,
+                finder=finder,
+                options=options,
+                wheel_cache=wheel_cache,
+                use_user_site=options.use_user_site,
+                ignore_installed=options.ignore_installed,
+                ignore_requires_python=options.ignore_requires_python,
+                force_reinstall=options.force_reinstall,
+                upgrade_strategy=upgrade_strategy,
+                use_pep517=options.use_pep517,
+            )
+
+            self.trace_basic_info(finder)
+
+            requirement_set = resolver.resolve(
+                reqs, check_supported_wheels=not options.target_dir
+            )
+
+            if options.json_report_file:
+                logger.warning(
+                    "--report is currently an experimental option. "
+                    "The output format may change in a future release "
+                    "without prior warning."
+                )
+
+                report = InstallationReport(requirement_set.requirements_to_install)
+                if options.json_report_file == "-":
+                    print_json(data=report.to_dict())
+                else:
+                    with open(options.json_report_file, "w", encoding="utf-8") as f:
+                        json.dump(report.to_dict(), f, indent=2, ensure_ascii=False)
+
+            if options.dry_run:
+                would_install_items = sorted(
+                    (r.metadata["name"], r.metadata["version"])
+                    for r in requirement_set.requirements_to_install
+                )
+                if would_install_items:
+                    write_output(
+                        "Would install %s",
+                        " ".join("-".join(item) for item in would_install_items),
+                    )
+                return SUCCESS
+
+            try:
+                pip_req = requirement_set.get_requirement("pip")
+            except KeyError:
+                modifying_pip = False
+            else:
+                # If we're not replacing an already installed pip,
+                # we're not modifying it.
+                modifying_pip = pip_req.satisfied_by is None
+            protect_pip_from_modification_on_windows(modifying_pip=modifying_pip)
+
+            check_bdist_wheel_allowed = get_check_bdist_wheel_allowed(
+                finder.format_control
+            )
+
+            reqs_to_build = [
+                r
+                for r in requirement_set.requirements.values()
+                if should_build_for_install_command(r, check_bdist_wheel_allowed)
+            ]
+
+            _, build_failures = build(
+                reqs_to_build,
+                wheel_cache=wheel_cache,
+                verify=True,
+                build_options=[],
+                global_options=global_options,
+            )
+
+            # If we're using PEP 517, we cannot do a legacy setup.py install
+            # so we fail here.
+            pep517_build_failure_names: List[str] = [
+                r.name for r in build_failures if r.use_pep517  # type: ignore
+            ]
+            if pep517_build_failure_names:
+                raise InstallationError(
+                    "Could not build wheels for {}, which is required to "
+                    "install pyproject.toml-based projects".format(
+                        ", ".join(pep517_build_failure_names)
+                    )
+                )
+
+            # For now, we just warn about failures building legacy
+            # requirements, as we'll fall through to a setup.py install for
+            # those.
+            for r in build_failures:
+                if not r.use_pep517:
+                    r.legacy_install_reason = LegacyInstallReasonFailedBdistWheel
+
+            to_install = resolver.get_installation_order(requirement_set)
+
+            # Check for conflicts in the package set we're installing.
+            conflicts: Optional[ConflictDetails] = None
+            should_warn_about_conflicts = (
+                not options.ignore_dependencies and options.warn_about_conflicts
+            )
+            if should_warn_about_conflicts:
+                conflicts = self._determine_conflicts(to_install)
+
+            # Don't warn about script install locations if
+            # --target or --prefix has been specified
+            warn_script_location = options.warn_script_location
+            if options.target_dir or options.prefix_path:
+                warn_script_location = False
+
+            installed = install_given_reqs(
+                to_install,
+                install_options,
+                global_options,
+                root=options.root_path,
+                home=target_temp_dir_path,
+                prefix=options.prefix_path,
+                warn_script_location=warn_script_location,
+                use_user_site=options.use_user_site,
+                pycompile=options.compile,
+            )
+
+            lib_locations = get_lib_location_guesses(
+                user=options.use_user_site,
+                home=target_temp_dir_path,
+                root=options.root_path,
+                prefix=options.prefix_path,
+                isolated=options.isolated_mode,
+            )
+            env = get_environment(lib_locations)
+
+            installed.sort(key=operator.attrgetter("name"))
+            items = []
+            for result in installed:
+                item = result.name
+                try:
+                    installed_dist = env.get_distribution(item)
+                    if installed_dist is not None:
+                        item = f"{item}-{installed_dist.version}"
+                except Exception:
+                    pass
+                items.append(item)
+
+            if conflicts is not None:
+                self._warn_about_conflicts(
+                    conflicts,
+                    resolver_variant=self.determine_resolver_variant(options),
+                )
+
+            installed_desc = " ".join(items)
+            if installed_desc:
+                write_output(
+                    "Successfully installed %s",
+                    installed_desc,
+                )
+        except OSError as error:
+            show_traceback = self.verbosity >= 1
+
+            message = create_os_error_message(
+                error,
+                show_traceback,
+                options.use_user_site,
+            )
+            logger.error(message, exc_info=show_traceback)  # noqa
+
+            return ERROR
+
+        if options.target_dir:
+            assert target_temp_dir
+            self._handle_target_dir(
+                options.target_dir, target_temp_dir, options.upgrade
+            )
+        if options.root_user_action == "warn":
+            warn_if_run_as_root()
+        return SUCCESS
+
+    def _handle_target_dir(
+        self, target_dir: str, target_temp_dir: TempDirectory, upgrade: bool
+    ) -> None:
+        ensure_dir(target_dir)
+
+        # Checking both purelib and platlib directories for installed
+        # packages to be moved to target directory
+        lib_dir_list = []
+
+        scheme = get_scheme("", home=target_temp_dir.path)
+        purelib_dir = scheme.purelib
+        platlib_dir = scheme.platlib
+        data_dir = scheme.data
+
+        if os.path.exists(purelib_dir):
+            lib_dir_list.append(purelib_dir)
+        if os.path.exists(platlib_dir) and platlib_dir != purelib_dir:
+            lib_dir_list.append(platlib_dir)
+        if os.path.exists(data_dir):
+            lib_dir_list.append(data_dir)
+
+        for lib_dir in lib_dir_list:
+            for item in os.listdir(lib_dir):
+                if lib_dir == data_dir:
+                    ddir = os.path.join(data_dir, item)
+                    if any(s.startswith(ddir) for s in lib_dir_list[:-1]):
+                        continue
+                target_item_dir = os.path.join(target_dir, item)
+                if os.path.exists(target_item_dir):
+                    if not upgrade:
+                        logger.warning(
+                            "Target directory %s already exists. Specify "
+                            "--upgrade to force replacement.",
+                            target_item_dir,
+                        )
+                        continue
+                    if os.path.islink(target_item_dir):
+                        logger.warning(
+                            "Target directory %s already exists and is "
+                            "a link. pip will not automatically replace "
+                            "links, please remove if replacement is "
+                            "desired.",
+                            target_item_dir,
+                        )
+                        continue
+                    if os.path.isdir(target_item_dir):
+                        shutil.rmtree(target_item_dir)
+                    else:
+                        os.remove(target_item_dir)
+
+                shutil.move(os.path.join(lib_dir, item), target_item_dir)
+
+    def _determine_conflicts(
+        self, to_install: List[InstallRequirement]
+    ) -> Optional[ConflictDetails]:
+        try:
+            return check_install_conflicts(to_install)
+        except Exception:
+            logger.exception(
+                "Error while checking for conflicts. Please file an issue on "
+                "pip's issue tracker: https://github.com/pypa/pip/issues/new"
+            )
+            return None
+
+    def _warn_about_conflicts(
+        self, conflict_details: ConflictDetails, resolver_variant: str
+    ) -> None:
+        package_set, (missing, conflicting) = conflict_details
+        if not missing and not conflicting:
+            return
+
+        parts: List[str] = []
+        if resolver_variant == "legacy":
+            parts.append(
+                "pip's legacy dependency resolver does not consider dependency "
+                "conflicts when selecting packages. This behaviour is the "
+                "source of the following dependency conflicts."
+            )
+        else:
+            assert resolver_variant == "2020-resolver"
+            parts.append(
+                "pip's dependency resolver does not currently take into account "
+                "all the packages that are installed. This behaviour is the "
+                "source of the following dependency conflicts."
+            )
+
+        # NOTE: There is some duplication here, with commands/check.py
+        for project_name in missing:
+            version = package_set[project_name][0]
+            for dependency in missing[project_name]:
+                message = (
+                    "{name} {version} requires {requirement}, "
+                    "which is not installed."
+                ).format(
+                    name=project_name,
+                    version=version,
+                    requirement=dependency[1],
+                )
+                parts.append(message)
+
+        for project_name in conflicting:
+            version = package_set[project_name][0]
+            for dep_name, dep_version, req in conflicting[project_name]:
+                message = (
+                    "{name} {version} requires {requirement}, but {you} have "
+                    "{dep_name} {dep_version} which is incompatible."
+                ).format(
+                    name=project_name,
+                    version=version,
+                    requirement=req,
+                    dep_name=dep_name,
+                    dep_version=dep_version,
+                    you=("you" if resolver_variant == "2020-resolver" else "you'll"),
+                )
+                parts.append(message)
+
+        logger.critical("\n".join(parts))
+
+
+def get_lib_location_guesses(
+    user: bool = False,
+    home: Optional[str] = None,
+    root: Optional[str] = None,
+    isolated: bool = False,
+    prefix: Optional[str] = None,
+) -> List[str]:
+    scheme = get_scheme(
+        "",
+        user=user,
+        home=home,
+        root=root,
+        isolated=isolated,
+        prefix=prefix,
+    )
+    return [scheme.purelib, scheme.platlib]
+
+
+def site_packages_writable(root: Optional[str], isolated: bool) -> bool:
+    return all(
+        test_writable_dir(d)
+        for d in set(get_lib_location_guesses(root=root, isolated=isolated))
+    )
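+
+# Illustrative behaviour (assumed virtualenv layout, not asserted by pip):
+#
+#   get_lib_location_guesses()
+#   # ['/venv/lib/python3.9/site-packages', '/venv/lib/python3.9/site-packages']
+#   site_packages_writable(root=None, isolated=False)
+#   # True when every guessed directory is writable by the current user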
+
+
+def decide_user_install(
+    use_user_site: Optional[bool],
+    prefix_path: Optional[str] = None,
+    target_dir: Optional[str] = None,
+    root_path: Optional[str] = None,
+    isolated_mode: bool = False,
+) -> bool:
+    """Determine whether to do a user install based on the input options.
+
+    If use_user_site is False, no additional checks are done.
+    If use_user_site is True, it is checked for compatibility with other
+    options.
+    If use_user_site is None, the default behaviour depends on the environment,
+    which is provided by the other arguments.
+    """
+    # In some cases (config from tox), use_user_site can be set to an integer
+    # rather than a bool, which 'use_user_site is False' wouldn't catch.
+    if (use_user_site is not None) and (not use_user_site):
+        logger.debug("Non-user install by explicit request")
+        return False
+
+    if use_user_site:
+        if prefix_path:
+            raise CommandError(
+                "Can not combine '--user' and '--prefix' as they imply "
+                "different installation locations"
+            )
+        if virtualenv_no_global():
+            raise InstallationError(
+                "Can not perform a '--user' install. User site-packages "
+                "are not visible in this virtualenv."
+            )
+        logger.debug("User install by explicit request")
+        return True
+
+    # If we are here, user installs have not been explicitly requested/avoided
+    assert use_user_site is None
+
+    # user install incompatible with --prefix/--target
+    if prefix_path or target_dir:
+        logger.debug("Non-user install due to --prefix or --target option")
+        return False
+
+    # If user installs are not enabled, choose a non-user install
+    if not site.ENABLE_USER_SITE:
+        logger.debug("Non-user install because user site-packages disabled")
+        return False
+
+    # If we have permission for a non-user install, do that,
+    # otherwise do a user install.
+    if site_packages_writable(root=root_path, isolated=isolated_mode):
+        logger.debug("Non-user install because site-packages writeable")
+        return False
+
+    logger.info(
+        "Defaulting to user installation because normal site-packages "
+        "is not writeable"
+    )
+    return True
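+
+# Summary of the decision above (restates the code; adds no new behaviour):
+#
+#   decide_user_install(False)                 -> False (explicit request)
+#   decide_user_install(True)                  -> True; raises if --prefix is
+#                                                 set or the virtualenv hides
+#                                                 user site-packages
+#   decide_user_install(None, target_dir="d")  -> False (--target/--prefix win)
+#   decide_user_install(None)                  -> False if site-packages is
+#                                                 writeable, else True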
+
+
+def reject_location_related_install_options(
+    requirements: List[InstallRequirement], options: Optional[List[str]]
+) -> None:
+    """If any location-changing --install-option arguments were passed for
+    requirements or on the command line, then raise a CommandError.
+    """
+
+    def format_options(option_names: Iterable[str]) -> List[str]:
+        return ["--{}".format(name.replace("_", "-")) for name in option_names]
+
+    offenders = []
+
+    for requirement in requirements:
+        install_options = requirement.install_options
+        location_options = parse_distutils_args(install_options)
+        if location_options:
+            offenders.append(
+                "{!r} from {}".format(
+                    format_options(location_options.keys()), requirement
+                )
+            )
+
+    if options:
+        location_options = parse_distutils_args(options)
+        if location_options:
+            offenders.append(
+                "{!r} from command line".format(format_options(location_options.keys()))
+            )
+
+    if not offenders:
+        return
+
+    raise CommandError(
+        "Location-changing options found in --install-option: {}."
+        " This is unsupported, use pip-level options like --user,"
+        " --prefix, --root, and --target instead.".format("; ".join(offenders))
+    )
+
+
+def create_os_error_message(
+    error: OSError, show_traceback: bool, using_user_site: bool
+) -> str:
+    """Format an error message for an OSError
+
+    It may occur anytime during the execution of the install command.
+    """
+    parts = []
+
+    # Mention the error if we are not going to show a traceback
+    parts.append("Could not install packages due to an OSError")
+    if not show_traceback:
+        parts.append(": ")
+        parts.append(str(error))
+    else:
+        parts.append(".")
+
+    # Split the error indication from a helper message (if any)
+    parts[-1] += "\n"
+
+    # Suggest useful actions to the user:
+    #  (1) using user site-packages or (2) verifying the permissions
+    if error.errno == errno.EACCES:
+        user_option_part = "Consider using the `--user` option"
+        permissions_part = "Check the permissions"
+
+        if not running_under_virtualenv() and not using_user_site:
+            parts.extend(
+                [
+                    user_option_part,
+                    " or ",
+                    permissions_part.lower(),
+                ]
+            )
+        else:
+            parts.append(permissions_part)
+        parts.append(".\n")
+
+    # Suggest that the user enable Long Paths if the path length is
+    # more than 260 characters
+    if (
+        WINDOWS
+        and error.errno == errno.ENOENT
+        and error.filename
+        and len(error.filename) > 260
+    ):
+        parts.append(
+            "HINT: This error might have occurred since "
+            "this system does not have Windows Long Path "
+            "support enabled. You can find information on "
+            "how to enable this at "
+            "https://pip.pypa.io/warnings/enable-long-paths\n"
+        )
+
+    return "".join(parts).strip() + "\n"
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/list.py b/venv/lib/python3.9/site-packages/pip/_internal/commands/list.py
new file mode 100644
index 0000000..8e1426d
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/list.py
@@ -0,0 +1,365 @@
+import json
+import logging
+from optparse import Values
+from typing import TYPE_CHECKING, Generator, List, Optional, Sequence, Tuple, cast
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.req_command import IndexGroupCommand
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.exceptions import CommandError
+from pip._internal.index.collector import LinkCollector
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import BaseDistribution, get_environment
+from pip._internal.models.selection_prefs import SelectionPreferences
+from pip._internal.network.session import PipSession
+from pip._internal.utils.compat import stdlib_pkgs
+from pip._internal.utils.misc import tabulate, write_output
+
+if TYPE_CHECKING:
+    from pip._internal.metadata.base import DistributionVersion
+
+    class _DistWithLatestInfo(BaseDistribution):
+        """Give the distribution object a couple of extra fields.
+
+        These will be populated during ``get_outdated()``. This is dirty but
+        makes the rest of the code much cleaner.
+        """
+
+        latest_version: DistributionVersion
+        latest_filetype: str
+
+    _ProcessedDists = Sequence[_DistWithLatestInfo]
+
+
+logger = logging.getLogger(__name__)
+
+
+class ListCommand(IndexGroupCommand):
+    """
+    List installed packages, including editables.
+
+    Packages are listed in a case-insensitive sorted order.
+    """
+
+    ignore_require_venv = True
+    usage = """
+      %prog [options]"""
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "-o",
+            "--outdated",
+            action="store_true",
+            default=False,
+            help="List outdated packages",
+        )
+        self.cmd_opts.add_option(
+            "-u",
+            "--uptodate",
+            action="store_true",
+            default=False,
+            help="List uptodate packages",
+        )
+        self.cmd_opts.add_option(
+            "-e",
+            "--editable",
+            action="store_true",
+            default=False,
+            help="List editable projects.",
+        )
+        self.cmd_opts.add_option(
+            "-l",
+            "--local",
+            action="store_true",
+            default=False,
+            help=(
+                "If in a virtualenv that has global access, do not list "
+                "globally-installed packages."
+            ),
+        )
+        self.cmd_opts.add_option(
+            "--user",
+            dest="user",
+            action="store_true",
+            default=False,
+            help="Only output packages installed in user-site.",
+        )
+        self.cmd_opts.add_option(cmdoptions.list_path())
+        self.cmd_opts.add_option(
+            "--pre",
+            action="store_true",
+            default=False,
+            help=(
+                "Include pre-release and development versions. By default, "
+                "pip only finds stable versions."
+            ),
+        )
+
+        self.cmd_opts.add_option(
+            "--format",
+            action="store",
+            dest="list_format",
+            default="columns",
+            choices=("columns", "freeze", "json"),
+            help="Select the output format among: columns (default), freeze, or json",
+        )
+
+        self.cmd_opts.add_option(
+            "--not-required",
+            action="store_true",
+            dest="not_required",
+            help="List packages that are not dependencies of installed packages.",
+        )
+
+        self.cmd_opts.add_option(
+            "--exclude-editable",
+            action="store_false",
+            dest="include_editable",
+            help="Exclude editable package from output.",
+        )
+        self.cmd_opts.add_option(
+            "--include-editable",
+            action="store_true",
+            dest="include_editable",
+            help="Include editable package from output.",
+            default=True,
+        )
+        self.cmd_opts.add_option(cmdoptions.list_exclude())
+        index_opts = cmdoptions.make_option_group(cmdoptions.index_group, self.parser)
+
+        self.parser.insert_option_group(0, index_opts)
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def _build_package_finder(
+        self, options: Values, session: PipSession
+    ) -> PackageFinder:
+        """
+        Create a package finder appropriate to this list command.
+        """
+        link_collector = LinkCollector.create(session, options=options)
+
+        # Pass allow_yanked=False to ignore yanked versions.
+        selection_prefs = SelectionPreferences(
+            allow_yanked=False,
+            allow_all_prereleases=options.pre,
+        )
+
+        return PackageFinder.create(
+            link_collector=link_collector,
+            selection_prefs=selection_prefs,
+        )
+
+    def run(self, options: Values, args: List[str]) -> int:
+        if options.outdated and options.uptodate:
+            raise CommandError("Options --outdated and --uptodate cannot be combined.")
+
+        if options.outdated and options.list_format == "freeze":
+            raise CommandError(
+                "List format 'freeze' can not be used with the --outdated option."
+            )
+
+        cmdoptions.check_list_path_option(options)
+
+        skip = set(stdlib_pkgs)
+        if options.excludes:
+            skip.update(canonicalize_name(n) for n in options.excludes)
+
+        packages: "_ProcessedDists" = [
+            cast("_DistWithLatestInfo", d)
+            for d in get_environment(options.path).iter_installed_distributions(
+                local_only=options.local,
+                user_only=options.user,
+                editables_only=options.editable,
+                include_editables=options.include_editable,
+                skip=skip,
+            )
+        ]
+
+        # get_not_required must be called first in order to find and
+        # filter out all dependencies correctly. Otherwise a package
+        # can't be identified as a requirement, because some parent
+        # packages could already have been filtered out.
+        if options.not_required:
+            packages = self.get_not_required(packages, options)
+
+        if options.outdated:
+            packages = self.get_outdated(packages, options)
+        elif options.uptodate:
+            packages = self.get_uptodate(packages, options)
+
+        self.output_package_listing(packages, options)
+        return SUCCESS
+
+    def get_outdated(
+        self, packages: "_ProcessedDists", options: Values
+    ) -> "_ProcessedDists":
+        return [
+            dist
+            for dist in self.iter_packages_latest_infos(packages, options)
+            if dist.latest_version > dist.version
+        ]
+
+    def get_uptodate(
+        self, packages: "_ProcessedDists", options: Values
+    ) -> "_ProcessedDists":
+        return [
+            dist
+            for dist in self.iter_packages_latest_infos(packages, options)
+            if dist.latest_version == dist.version
+        ]
+
+    def get_not_required(
+        self, packages: "_ProcessedDists", options: Values
+    ) -> "_ProcessedDists":
+        dep_keys = {
+            canonicalize_name(dep.name)
+            for dist in packages
+            for dep in (dist.iter_dependencies() or ())
+        }
+
+        # Create a set to remove duplicate packages, and cast it to a list
+        # to keep the return type consistent with get_outdated and
+        # get_uptodate
+        return list({pkg for pkg in packages if pkg.canonical_name not in dep_keys})
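+
+# Example (hypothetical): if installed package A depends on installed package
+# B, get_not_required returns only A, since B's canonical name appears in the
+# collected dependency keys.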
+
+    def iter_packages_latest_infos(
+        self, packages: "_ProcessedDists", options: Values
+    ) -> Generator["_DistWithLatestInfo", None, None]:
+        with self._build_session(options) as session:
+            finder = self._build_package_finder(options, session)
+
+            def latest_info(
+                dist: "_DistWithLatestInfo",
+            ) -> Optional["_DistWithLatestInfo"]:
+                all_candidates = finder.find_all_candidates(dist.canonical_name)
+                if not options.pre:
+                    # Remove prereleases
+                    all_candidates = [
+                        candidate
+                        for candidate in all_candidates
+                        if not candidate.version.is_prerelease
+                    ]
+
+                evaluator = finder.make_candidate_evaluator(
+                    project_name=dist.canonical_name,
+                )
+                best_candidate = evaluator.sort_best_candidate(all_candidates)
+                if best_candidate is None:
+                    return None
+
+                remote_version = best_candidate.version
+                if best_candidate.link.is_wheel:
+                    typ = "wheel"
+                else:
+                    typ = "sdist"
+                dist.latest_version = remote_version
+                dist.latest_filetype = typ
+                return dist
+
+            for dist in map(latest_info, packages):
+                if dist is not None:
+                    yield dist
+
+    def output_package_listing(
+        self, packages: "_ProcessedDists", options: Values
+    ) -> None:
+        packages = sorted(
+            packages,
+            key=lambda dist: dist.canonical_name,
+        )
+        if options.list_format == "columns" and packages:
+            data, header = format_for_columns(packages, options)
+            self.output_package_listing_columns(data, header)
+        elif options.list_format == "freeze":
+            for dist in packages:
+                if options.verbose >= 1:
+                    write_output(
+                        "%s==%s (%s)", dist.raw_name, dist.version, dist.location
+                    )
+                else:
+                    write_output("%s==%s", dist.raw_name, dist.version)
+        elif options.list_format == "json":
+            write_output(format_for_json(packages, options))
+
+    def output_package_listing_columns(
+        self, data: List[List[str]], header: List[str]
+    ) -> None:
+        # insert the header first: we need to know the size of column names
+        if len(data) > 0:
+            data.insert(0, header)
+
+        pkg_strings, sizes = tabulate(data)
+
+        # Create and add a separator.
+        if len(data) > 0:
+            pkg_strings.insert(1, " ".join(map(lambda x: "-" * x, sizes)))
+
+        for val in pkg_strings:
+            write_output(val)
+
+
+def format_for_columns(
+    pkgs: "_ProcessedDists", options: Values
+) -> Tuple[List[List[str]], List[str]]:
+    """
+    Convert the package data into something usable
+    by output_package_listing_columns.
+    """
+    header = ["Package", "Version"]
+
+    running_outdated = options.outdated
+    if running_outdated:
+        header.extend(["Latest", "Type"])
+
+    has_editables = any(x.editable for x in pkgs)
+    if has_editables:
+        header.append("Editable project location")
+
+    if options.verbose >= 1:
+        header.append("Location")
+    if options.verbose >= 1:
+        header.append("Installer")
+
+    data = []
+    for proj in pkgs:
+        # if we're working on the 'outdated' list, separate out the
+        # latest_version and type
+        row = [proj.raw_name, str(proj.version)]
+
+        if running_outdated:
+            row.append(str(proj.latest_version))
+            row.append(proj.latest_filetype)
+
+        if has_editables:
+            row.append(proj.editable_project_location or "")
+
+        if options.verbose >= 1:
+            row.append(proj.location or "")
+        if options.verbose >= 1:
+            row.append(proj.installer)
+
+        data.append(row)
+
+    return data, header
+
+
+def format_for_json(packages: "_ProcessedDists", options: Values) -> str:
+    data = []
+    for dist in packages:
+        info = {
+            "name": dist.raw_name,
+            "version": str(dist.version),
+        }
+        if options.verbose >= 1:
+            info["location"] = dist.location or ""
+            info["installer"] = dist.installer
+        if options.outdated:
+            info["latest_version"] = str(dist.latest_version)
+            info["latest_filetype"] = dist.latest_filetype
+        editable_project_location = dist.editable_project_location
+        if editable_project_location:
+            info["editable_project_location"] = editable_project_location
+        data.append(info)
+    return json.dumps(data)
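+
+# Illustrative JSON entry (hypothetical values) for --format=json with
+# --outdated and -v; key names follow the dict built above:
+#
+#   [{"name": "requests", "version": "2.27.1",
+#     "location": "/venv/lib/python3.9/site-packages", "installer": "pip",
+#     "latest_version": "2.28.0", "latest_filetype": "wheel"}]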
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/search.py b/venv/lib/python3.9/site-packages/pip/_internal/commands/search.py
new file mode 100644
index 0000000..03ed925
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/search.py
@@ -0,0 +1,174 @@
+import logging
+import shutil
+import sys
+import textwrap
+import xmlrpc.client
+from collections import OrderedDict
+from optparse import Values
+from typing import TYPE_CHECKING, Dict, List, Optional
+
+from pip._vendor.packaging.version import parse as parse_version
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.req_command import SessionCommandMixin
+from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS
+from pip._internal.exceptions import CommandError
+from pip._internal.metadata import get_default_environment
+from pip._internal.models.index import PyPI
+from pip._internal.network.xmlrpc import PipXmlrpcTransport
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import write_output
+
+if TYPE_CHECKING:
+    from typing import TypedDict
+
+    class TransformedHit(TypedDict):
+        name: str
+        summary: str
+        versions: List[str]
+
+
+logger = logging.getLogger(__name__)
+
+
+class SearchCommand(Command, SessionCommandMixin):
+    """Search for PyPI packages whose name or summary contains <query>."""
+
+    usage = """
+      %prog [options] <query>"""
+    ignore_require_venv = True
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "-i",
+            "--index",
+            dest="index",
+            metavar="URL",
+            default=PyPI.pypi_url,
+            help="Base URL of Python Package Index (default %default)",
+        )
+
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options: Values, args: List[str]) -> int:
+        if not args:
+            raise CommandError("Missing required argument (search query).")
+        query = args
+        pypi_hits = self.search(query, options)
+        hits = transform_hits(pypi_hits)
+
+        terminal_width = None
+        if sys.stdout.isatty():
+            terminal_width = shutil.get_terminal_size()[0]
+
+        print_results(hits, terminal_width=terminal_width)
+        if pypi_hits:
+            return SUCCESS
+        return NO_MATCHES_FOUND
+
+    def search(self, query: List[str], options: Values) -> List[Dict[str, str]]:
+        index_url = options.index
+
+        session = self.get_default_session(options)
+
+        transport = PipXmlrpcTransport(index_url, session)
+        pypi = xmlrpc.client.ServerProxy(index_url, transport)
+        try:
+            hits = pypi.search({"name": query, "summary": query}, "or")
+        except xmlrpc.client.Fault as fault:
+            message = "XMLRPC request failed [code: {code}]\n{string}".format(
+                code=fault.faultCode,
+                string=fault.faultString,
+            )
+            raise CommandError(message)
+        assert isinstance(hits, list)
+        return hits
+
+
+def transform_hits(hits: List[Dict[str, str]]) -> List["TransformedHit"]:
+    """
+    The list from pypi is really a list of versions. We want a list of
+    packages with the list of versions stored inline. This converts the
+    list from pypi into one we can use.
+    """
+    packages: Dict[str, "TransformedHit"] = OrderedDict()
+    for hit in hits:
+        name = hit["name"]
+        summary = hit["summary"]
+        version = hit["version"]
+
+        if name not in packages.keys():
+            packages[name] = {
+                "name": name,
+                "summary": summary,
+                "versions": [version],
+            }
+        else:
+            packages[name]["versions"].append(version)
+
+            # if this is the highest version, replace summary and score
+            if version == highest_version(packages[name]["versions"]):
+                packages[name]["summary"] = summary
+
+    return list(packages.values())
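+
+# Example of the reshaping above (hypothetical hits): two versions of the
+# same project collapse into one entry, keeping the summary of the highest
+# version:
+#
+#   transform_hits([
+#       {"name": "foo", "summary": "old", "version": "1.0"},
+#       {"name": "foo", "summary": "new", "version": "2.0"},
+#   ])
+#   # -> [{"name": "foo", "summary": "new", "versions": ["1.0", "2.0"]}]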
+
+
+def print_dist_installation_info(name: str, latest: str) -> None:
+    env = get_default_environment()
+    dist = env.get_distribution(name)
+    if dist is not None:
+        with indent_log():
+            if dist.version == latest:
+                write_output("INSTALLED: %s (latest)", dist.version)
+            else:
+                write_output("INSTALLED: %s", dist.version)
+                if parse_version(latest).pre:
+                    write_output(
+                        "LATEST:    %s (pre-release; install"
+                        " with `pip install --pre`)",
+                        latest,
+                    )
+                else:
+                    write_output("LATEST:    %s", latest)
+
+
+def print_results(
+    hits: List["TransformedHit"],
+    name_column_width: Optional[int] = None,
+    terminal_width: Optional[int] = None,
+) -> None:
+    if not hits:
+        return
+    if name_column_width is None:
+        name_column_width = (
+            max(
+                [
+                    len(hit["name"]) + len(highest_version(hit.get("versions", ["-"])))
+                    for hit in hits
+                ]
+            )
+            + 4
+        )
+
+    for hit in hits:
+        name = hit["name"]
+        summary = hit["summary"] or ""
+        latest = highest_version(hit.get("versions", ["-"]))
+        if terminal_width is not None:
+            target_width = terminal_width - name_column_width - 5
+            if target_width > 10:
+                # wrap and indent summary to fit terminal
+                summary_lines = textwrap.wrap(summary, target_width)
+                summary = ("\n" + " " * (name_column_width + 3)).join(summary_lines)
+
+        name_latest = f"{name} ({latest})"
+        line = f"{name_latest:{name_column_width}} - {summary}"
+        try:
+            write_output(line)
+            print_dist_installation_info(name, latest)
+        except UnicodeEncodeError:
+            pass
+
+
+def highest_version(versions: List[str]) -> str:
+    return max(versions, key=parse_version)
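+
+# Note: parse_version gives PEP 440 ordering rather than lexicographic string
+# ordering, e.g. (hypothetical versions):
+#
+#   highest_version(["1.2", "1.10", "1.9"])  # -> "1.10", not "1.9"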
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/show.py b/venv/lib/python3.9/site-packages/pip/_internal/commands/show.py
new file mode 100644
index 0000000..212167c
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/show.py
@@ -0,0 +1,183 @@
+import logging
+from optparse import Values
+from typing import Generator, Iterable, Iterator, List, NamedTuple, Optional
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.metadata import BaseDistribution, get_default_environment
+from pip._internal.utils.misc import write_output
+
+logger = logging.getLogger(__name__)
+
+
+class ShowCommand(Command):
+    """
+    Show information about one or more installed packages.
+
+    The output is in RFC-compliant mail header format.
+    """
+
+    usage = """
+      %prog [options] <package> ..."""
+    ignore_require_venv = True
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "-f",
+            "--files",
+            dest="files",
+            action="store_true",
+            default=False,
+            help="Show the full list of installed files for each package.",
+        )
+
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options: Values, args: List[str]) -> int:
+        if not args:
+            logger.warning("ERROR: Please provide a package name or names.")
+            return ERROR
+        query = args
+
+        results = search_packages_info(query)
+        if not print_results(
+            results, list_files=options.files, verbose=options.verbose
+        ):
+            return ERROR
+        return SUCCESS
+
+
+class _PackageInfo(NamedTuple):
+    name: str
+    version: str
+    location: str
+    requires: List[str]
+    required_by: List[str]
+    installer: str
+    metadata_version: str
+    classifiers: List[str]
+    summary: str
+    homepage: str
+    project_urls: List[str]
+    author: str
+    author_email: str
+    license: str
+    entry_points: List[str]
+    files: Optional[List[str]]
+
+
+def search_packages_info(query: List[str]) -> Generator[_PackageInfo, None, None]:
+    """
+    Gather details from installed distributions. Yields the distribution
+    name, version, location, and installed files. Listing installed files
+    requires a pip-generated 'installed-files.txt' in the distribution's
+    '.egg-info' directory.
+    """
+    env = get_default_environment()
+
+    installed = {dist.canonical_name: dist for dist in env.iter_all_distributions()}
+    query_names = [canonicalize_name(name) for name in query]
+    missing = sorted(
+        [name for name, pkg in zip(query, query_names) if pkg not in installed]
+    )
+    if missing:
+        logger.warning("Package(s) not found: %s", ", ".join(missing))
+
+    def _get_requiring_packages(current_dist: BaseDistribution) -> Iterator[str]:
+        return (
+            dist.metadata["Name"] or "UNKNOWN"
+            for dist in installed.values()
+            if current_dist.canonical_name
+            in {canonicalize_name(d.name) for d in dist.iter_dependencies()}
+        )
+
+    for query_name in query_names:
+        try:
+            dist = installed[query_name]
+        except KeyError:
+            continue
+
+        requires = sorted((req.name for req in dist.iter_dependencies()), key=str.lower)
+        required_by = sorted(_get_requiring_packages(dist), key=str.lower)
+
+        try:
+            entry_points_text = dist.read_text("entry_points.txt")
+            entry_points = entry_points_text.splitlines(keepends=False)
+        except FileNotFoundError:
+            entry_points = []
+
+        files_iter = dist.iter_declared_entries()
+        if files_iter is None:
+            files: Optional[List[str]] = None
+        else:
+            files = sorted(files_iter)
+
+        metadata = dist.metadata
+
+        yield _PackageInfo(
+            name=dist.raw_name,
+            version=str(dist.version),
+            location=dist.location or "",
+            requires=requires,
+            required_by=required_by,
+            installer=dist.installer,
+            metadata_version=dist.metadata_version or "",
+            classifiers=metadata.get_all("Classifier", []),
+            summary=metadata.get("Summary", ""),
+            homepage=metadata.get("Home-page", ""),
+            project_urls=metadata.get_all("Project-URL", []),
+            author=metadata.get("Author", ""),
+            author_email=metadata.get("Author-email", ""),
+            license=metadata.get("License", ""),
+            entry_points=entry_points,
+            files=files,
+        )
+
+
+def print_results(
+    distributions: Iterable[_PackageInfo],
+    list_files: bool,
+    verbose: bool,
+) -> bool:
+    """
+    Print information about the installed distributions that were found.
+    """
+    results_printed = False
+    for i, dist in enumerate(distributions):
+        results_printed = True
+        if i > 0:
+            write_output("---")
+
+        write_output("Name: %s", dist.name)
+        write_output("Version: %s", dist.version)
+        write_output("Summary: %s", dist.summary)
+        write_output("Home-page: %s", dist.homepage)
+        write_output("Author: %s", dist.author)
+        write_output("Author-email: %s", dist.author_email)
+        write_output("License: %s", dist.license)
+        write_output("Location: %s", dist.location)
+        write_output("Requires: %s", ", ".join(dist.requires))
+        write_output("Required-by: %s", ", ".join(dist.required_by))
+
+        if verbose:
+            write_output("Metadata-Version: %s", dist.metadata_version)
+            write_output("Installer: %s", dist.installer)
+            write_output("Classifiers:")
+            for classifier in dist.classifiers:
+                write_output("  %s", classifier)
+            write_output("Entry-points:")
+            for entry in dist.entry_points:
+                write_output("  %s", entry.strip())
+            write_output("Project-URLs:")
+            for project_url in dist.project_urls:
+                write_output("  %s", project_url)
+        if list_files:
+            write_output("Files:")
+            if dist.files is None:
+                write_output("Cannot locate RECORD or installed-files.txt")
+            else:
+                for line in dist.files:
+                    write_output("  %s", line.strip())
+    return results_printed
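
Note: the Required-by field above is computed by scanning every installed
distribution's dependency list for the queried name. A rough sketch of the
same idea using only the standard library plus "packaging" (the helper name
and the example package are illustrative):

    from importlib.metadata import distributions
    from packaging.requirements import Requirement
    from packaging.utils import canonicalize_name

    def required_by(target: str):
        target = canonicalize_name(target)
        for dist in distributions():
            # dist.requires is a list of requirement strings, or None
            for spec in dist.requires or []:
                if canonicalize_name(Requirement(spec).name) == target:
                    yield dist.metadata["Name"]
                    break

    print(sorted(required_by("idna")))  # e.g. ['requests'] in a typical venv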
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/uninstall.py b/venv/lib/python3.9/site-packages/pip/_internal/commands/uninstall.py
new file mode 100644
index 0000000..dea8077
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/uninstall.py
@@ -0,0 +1,106 @@
+import logging
+from optparse import Values
+from typing import List
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.req_command import SessionCommandMixin, warn_if_run_as_root
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.exceptions import InstallationError
+from pip._internal.req import parse_requirements
+from pip._internal.req.constructors import (
+    install_req_from_line,
+    install_req_from_parsed_requirement,
+)
+from pip._internal.utils.misc import protect_pip_from_modification_on_windows
+
+logger = logging.getLogger(__name__)
+
+
+class UninstallCommand(Command, SessionCommandMixin):
+    """
+    Uninstall packages.
+
+    pip is able to uninstall most installed packages. Known exceptions are:
+
+    - Pure distutils packages installed with ``python setup.py install``, which
+      leave behind no metadata to determine what files were installed.
+    - Script wrappers installed by ``python setup.py develop``.
+    """
+
+    usage = """
+      %prog [options] <package> ...
+      %prog [options] -r <requirements file> ..."""
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "-r",
+            "--requirement",
+            dest="requirements",
+            action="append",
+            default=[],
+            metavar="file",
+            help=(
+                "Uninstall all the packages listed in the given requirements "
+                "file.  This option can be used multiple times."
+            ),
+        )
+        self.cmd_opts.add_option(
+            "-y",
+            "--yes",
+            dest="yes",
+            action="store_true",
+            help="Don't ask for confirmation of uninstall deletions.",
+        )
+        self.cmd_opts.add_option(cmdoptions.root_user_action())
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options: Values, args: List[str]) -> int:
+        session = self.get_default_session(options)
+
+        reqs_to_uninstall = {}
+        for name in args:
+            req = install_req_from_line(
+                name,
+                isolated=options.isolated_mode,
+            )
+            if req.name:
+                reqs_to_uninstall[canonicalize_name(req.name)] = req
+            else:
+                logger.warning(
+                    "Invalid requirement: %r ignored -"
+                    " the uninstall command expects named"
+                    " requirements.",
+                    name,
+                )
+        for filename in options.requirements:
+            for parsed_req in parse_requirements(
+                filename, options=options, session=session
+            ):
+                req = install_req_from_parsed_requirement(
+                    parsed_req, isolated=options.isolated_mode
+                )
+                if req.name:
+                    reqs_to_uninstall[canonicalize_name(req.name)] = req
+        if not reqs_to_uninstall:
+            raise InstallationError(
+                f"You must give at least one requirement to {self.name} (see "
+                f'"pip help {self.name}")'
+            )
+
+        protect_pip_from_modification_on_windows(
+            modifying_pip="pip" in reqs_to_uninstall
+        )
+
+        for req in reqs_to_uninstall.values():
+            uninstall_pathset = req.uninstall(
+                auto_confirm=options.yes,
+                verbose=self.verbosity > 0,
+            )
+            if uninstall_pathset:
+                uninstall_pathset.commit()
+        if options.root_user_action == "warn":
+            warn_if_run_as_root()
+        return SUCCESS
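
Note: keying reqs_to_uninstall by canonicalize_name() is what collapses
repeated spellings of one project into a single uninstall. PEP 503
normalization lowercases the name and folds runs of '-', '_' and '.' into a
single '-':

    from packaging.utils import canonicalize_name

    print(canonicalize_name("Foo_Bar"))   # 'foo-bar'
    print(canonicalize_name("FOO..bar"))  # 'foo-bar'
    print(canonicalize_name("foo-bar"))   # 'foo-bar'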
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/commands/wheel.py b/venv/lib/python3.9/site-packages/pip/_internal/commands/wheel.py
new file mode 100644
index 0000000..1afbd56
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/commands/wheel.py
@@ -0,0 +1,203 @@
+import logging
+import os
+import shutil
+from optparse import Values
+from typing import List
+
+from pip._internal.cache import WheelCache
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.req_command import RequirementCommand, with_cleanup
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.exceptions import CommandError
+from pip._internal.operations.build.build_tracker import get_build_tracker
+from pip._internal.req.req_install import (
+    InstallRequirement,
+    LegacySetupPyOptionsCheckMode,
+    check_legacy_setup_py_options,
+)
+from pip._internal.utils.deprecation import deprecated
+from pip._internal.utils.misc import ensure_dir, normalize_path
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.wheel_builder import build, should_build_for_wheel_command
+
+logger = logging.getLogger(__name__)
+
+
+class WheelCommand(RequirementCommand):
+    """
+    Build Wheel archives for your requirements and dependencies.
+
+    Wheel is a built-package format, and offers the advantage of not
+    recompiling your software during every install. For more details, see the
+    wheel docs: https://wheel.readthedocs.io/en/latest/
+
+    'pip wheel' uses the build system interface as described here:
+    https://pip.pypa.io/en/stable/reference/build-system/
+
+    """
+
+    usage = """
+      %prog [options] <requirement specifier> ...
+      %prog [options] -r <requirements file> ...
+      %prog [options] [-e] <vcs project url> ...
+      %prog [options] [-e] <local project path> ...
+      %prog [options] <archive url/path> ..."""
+
+    def add_options(self) -> None:
+
+        self.cmd_opts.add_option(
+            "-w",
+            "--wheel-dir",
+            dest="wheel_dir",
+            metavar="dir",
+            default=os.curdir,
+            help=(
+                "Build wheels into <dir>, where the default is the "
+                "current working directory."
+            ),
+        )
+        self.cmd_opts.add_option(cmdoptions.no_binary())
+        self.cmd_opts.add_option(cmdoptions.only_binary())
+        self.cmd_opts.add_option(cmdoptions.prefer_binary())
+        self.cmd_opts.add_option(cmdoptions.no_build_isolation())
+        self.cmd_opts.add_option(cmdoptions.use_pep517())
+        self.cmd_opts.add_option(cmdoptions.no_use_pep517())
+        self.cmd_opts.add_option(cmdoptions.check_build_deps())
+        self.cmd_opts.add_option(cmdoptions.constraints())
+        self.cmd_opts.add_option(cmdoptions.editable())
+        self.cmd_opts.add_option(cmdoptions.requirements())
+        self.cmd_opts.add_option(cmdoptions.src())
+        self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
+        self.cmd_opts.add_option(cmdoptions.no_deps())
+        self.cmd_opts.add_option(cmdoptions.progress_bar())
+
+        self.cmd_opts.add_option(
+            "--no-verify",
+            dest="no_verify",
+            action="store_true",
+            default=False,
+            help="Don't verify if built wheel is valid.",
+        )
+
+        self.cmd_opts.add_option(cmdoptions.config_settings())
+        self.cmd_opts.add_option(cmdoptions.build_options())
+        self.cmd_opts.add_option(cmdoptions.global_options())
+
+        self.cmd_opts.add_option(
+            "--pre",
+            action="store_true",
+            default=False,
+            help=(
+                "Include pre-release and development versions. By default, "
+                "pip only finds stable versions."
+            ),
+        )
+
+        self.cmd_opts.add_option(cmdoptions.require_hashes())
+
+        index_opts = cmdoptions.make_option_group(
+            cmdoptions.index_group,
+            self.parser,
+        )
+
+        self.parser.insert_option_group(0, index_opts)
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    @with_cleanup
+    def run(self, options: Values, args: List[str]) -> int:
+        session = self.get_default_session(options)
+
+        finder = self._build_package_finder(options, session)
+        wheel_cache = WheelCache(options.cache_dir, options.format_control)
+
+        options.wheel_dir = normalize_path(options.wheel_dir)
+        ensure_dir(options.wheel_dir)
+
+        build_tracker = self.enter_context(get_build_tracker())
+
+        directory = TempDirectory(
+            delete=not options.no_clean,
+            kind="wheel",
+            globally_managed=True,
+        )
+
+        reqs = self.get_requirements(args, options, finder, session)
+        check_legacy_setup_py_options(
+            options, reqs, LegacySetupPyOptionsCheckMode.WHEEL
+        )
+
+        if "no-binary-enable-wheel-cache" in options.features_enabled:
+            # TODO: remove format_control from WheelCache when the deprecation cycle
+            # is over
+            wheel_cache = WheelCache(options.cache_dir)
+        else:
+            if options.format_control.no_binary:
+                deprecated(
+                    reason=(
+                        "--no-binary currently disables reading from "
+                        "the cache of locally built wheels. In the future "
+                        "--no-binary will not influence the wheel cache."
+                    ),
+                    replacement="to use the --no-cache-dir option",
+                    feature_flag="no-binary-enable-wheel-cache",
+                    issue=11453,
+                    gone_in="23.1",
+                )
+            wheel_cache = WheelCache(options.cache_dir, options.format_control)
+
+        preparer = self.make_requirement_preparer(
+            temp_build_dir=directory,
+            options=options,
+            build_tracker=build_tracker,
+            session=session,
+            finder=finder,
+            download_dir=options.wheel_dir,
+            use_user_site=False,
+            verbosity=self.verbosity,
+        )
+
+        resolver = self.make_resolver(
+            preparer=preparer,
+            finder=finder,
+            options=options,
+            wheel_cache=wheel_cache,
+            ignore_requires_python=options.ignore_requires_python,
+            use_pep517=options.use_pep517,
+        )
+
+        self.trace_basic_info(finder)
+
+        requirement_set = resolver.resolve(reqs, check_supported_wheels=True)
+
+        reqs_to_build: List[InstallRequirement] = []
+        for req in requirement_set.requirements.values():
+            if req.is_wheel:
+                preparer.save_linked_requirement(req)
+            elif should_build_for_wheel_command(req):
+                reqs_to_build.append(req)
+
+        # build wheels
+        build_successes, build_failures = build(
+            reqs_to_build,
+            wheel_cache=wheel_cache,
+            verify=(not options.no_verify),
+            build_options=options.build_options or [],
+            global_options=options.global_options or [],
+        )
+        for req in build_successes:
+            assert req.link and req.link.is_wheel
+            assert req.local_file_path
+            # copy from cache to target directory
+            try:
+                shutil.copy(req.local_file_path, options.wheel_dir)
+            except OSError as e:
+                logger.warning(
+                    "Building wheel for %s failed: %s",
+                    req.name,
+                    e,
+                )
+                build_failures.append(req)
+        if len(build_failures) != 0:
+            raise CommandError("Failed to build one or more wheels")
+
+        return SUCCESS
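
Note: pip intentionally exposes no stable Python API, so the supported way to
drive this command programmatically is a subprocess; the package name and
output directory below are illustrative:

    import subprocess
    import sys

    # Equivalent to running `pip wheel -w wheelhouse requests` in a shell:
    # build wheels for a requirement and its dependencies into ./wheelhouse.
    subprocess.run(
        [sys.executable, "-m", "pip", "wheel", "-w", "wheelhouse", "requests"],
        check=True,
    )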
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/configuration.py b/venv/lib/python3.9/site-packages/pip/_internal/configuration.py
new file mode 100644
index 0000000..8fd46c9
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/configuration.py
@@ -0,0 +1,374 @@
+"""Configuration management setup
+
+Some terminology:
+- name
+  As written in config files.
+- value
+  Value associated with a name.
+- key
+  Name combined with its section (section.name).
+- variant
+  A single word describing where the configuration key-value pair came from
+"""
+
+import configparser
+import locale
+import os
+import sys
+from typing import Any, Dict, Iterable, List, NewType, Optional, Tuple
+
+from pip._internal.exceptions import (
+    ConfigurationError,
+    ConfigurationFileCouldNotBeLoaded,
+)
+from pip._internal.utils import appdirs
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.logging import getLogger
+from pip._internal.utils.misc import ensure_dir, enum
+
+RawConfigParser = configparser.RawConfigParser  # Shorthand
+Kind = NewType("Kind", str)
+
+CONFIG_BASENAME = "pip.ini" if WINDOWS else "pip.conf"
+ENV_NAMES_IGNORED = "version", "help"
+
+# The kinds of configurations there are.
+kinds = enum(
+    USER="user",  # User Specific
+    GLOBAL="global",  # System Wide
+    SITE="site",  # [Virtual] Environment Specific
+    ENV="env",  # from PIP_CONFIG_FILE
+    ENV_VAR="env-var",  # from Environment Variables
+)
+OVERRIDE_ORDER = kinds.GLOBAL, kinds.USER, kinds.SITE, kinds.ENV, kinds.ENV_VAR
+VALID_LOAD_ONLY = kinds.USER, kinds.GLOBAL, kinds.SITE
+
+logger = getLogger(__name__)
+
+
+# NOTE: Maybe use the optionxform attribute to normalize key names.
+def _normalize_name(name: str) -> str:
+    """Make a name consistent regardless of source (environment or file)"""
+    name = name.lower().replace("_", "-")
+    if name.startswith("--"):
+        name = name[2:]  # only prefer long opts
+    return name
+
+
+def _disassemble_key(name: str) -> List[str]:
+    if "." not in name:
+        error_message = (
+            "Key does not contain dot separated section and key. "
+            "Perhaps you wanted to use 'global.{}' instead?"
+        ).format(name)
+        raise ConfigurationError(error_message)
+    return name.split(".", 1)
+
+
+def get_configuration_files() -> Dict[Kind, List[str]]:
+    global_config_files = [
+        os.path.join(path, CONFIG_BASENAME) for path in appdirs.site_config_dirs("pip")
+    ]
+
+    site_config_file = os.path.join(sys.prefix, CONFIG_BASENAME)
+    legacy_config_file = os.path.join(
+        os.path.expanduser("~"),
+        "pip" if WINDOWS else ".pip",
+        CONFIG_BASENAME,
+    )
+    new_config_file = os.path.join(appdirs.user_config_dir("pip"), CONFIG_BASENAME)
+    return {
+        kinds.GLOBAL: global_config_files,
+        kinds.SITE: [site_config_file],
+        kinds.USER: [legacy_config_file, new_config_file],
+    }
+
+
+class Configuration:
+    """Handles management of configuration.
+
+    Provides an interface to accessing and managing configuration files.
+
+    This class provides an API that takes "section.key-name" style
+    keys and stores the value associated with it as "key-name" under the
+    section "section".
+
+    This allows for a clean interface wherein both the section and the
+    key-name are preserved in an easy-to-manage form in the configuration
+    files, and the stored data stays easy to consume.
+    """
+
+    def __init__(self, isolated: bool, load_only: Optional[Kind] = None) -> None:
+        super().__init__()
+
+        if load_only is not None and load_only not in VALID_LOAD_ONLY:
+            raise ConfigurationError(
+                "Got invalid value for load_only - should be one of {}".format(
+                    ", ".join(map(repr, VALID_LOAD_ONLY))
+                )
+            )
+        self.isolated = isolated
+        self.load_only = load_only
+
+        # Because we keep track of where we got the data from
+        self._parsers: Dict[Kind, List[Tuple[str, RawConfigParser]]] = {
+            variant: [] for variant in OVERRIDE_ORDER
+        }
+        self._config: Dict[Kind, Dict[str, Any]] = {
+            variant: {} for variant in OVERRIDE_ORDER
+        }
+        self._modified_parsers: List[Tuple[str, RawConfigParser]] = []
+
+    def load(self) -> None:
+        """Loads configuration from configuration files and environment"""
+        self._load_config_files()
+        if not self.isolated:
+            self._load_environment_vars()
+
+    def get_file_to_edit(self) -> Optional[str]:
+        """Returns the file with highest priority in configuration"""
+        assert self.load_only is not None, "load_only must be set to get a file to edit"
+
+        try:
+            return self._get_parser_to_modify()[0]
+        except IndexError:
+            return None
+
+    def items(self) -> Iterable[Tuple[str, Any]]:
+        """Returns key-value pairs like dict.items() representing the loaded
+        configuration
+        """
+        return self._dictionary.items()
+
+    def get_value(self, key: str) -> Any:
+        """Get a value from the configuration."""
+        orig_key = key
+        key = _normalize_name(key)
+        try:
+            return self._dictionary[key]
+        except KeyError:
+            # disassembling triggers a more useful error message than simply
+            # "No such key" in the case that the key isn't in the form command.option
+            _disassemble_key(key)
+            raise ConfigurationError(f"No such key - {orig_key}")
+
+    def set_value(self, key: str, value: Any) -> None:
+        """Modify a value in the configuration."""
+        key = _normalize_name(key)
+        self._ensure_have_load_only()
+
+        assert self.load_only
+        fname, parser = self._get_parser_to_modify()
+
+        if parser is not None:
+            section, name = _disassemble_key(key)
+
+            # Modify the parser and the configuration
+            if not parser.has_section(section):
+                parser.add_section(section)
+            parser.set(section, name, value)
+
+        self._config[self.load_only][key] = value
+        self._mark_as_modified(fname, parser)
+
+    def unset_value(self, key: str) -> None:
+        """Unset a value in the configuration."""
+        orig_key = key
+        key = _normalize_name(key)
+        self._ensure_have_load_only()
+
+        assert self.load_only
+        if key not in self._config[self.load_only]:
+            raise ConfigurationError(f"No such key - {orig_key}")
+
+        fname, parser = self._get_parser_to_modify()
+
+        if parser is not None:
+            section, name = _disassemble_key(key)
+            if not (
+                parser.has_section(section) and parser.remove_option(section, name)
+            ):
+                # The option was not removed.
+                raise ConfigurationError(
+                    "Fatal Internal error [id=1]. Please report as a bug."
+                )
+
+            # The section may be empty after the option was removed.
+            if not parser.items(section):
+                parser.remove_section(section)
+            self._mark_as_modified(fname, parser)
+
+        del self._config[self.load_only][key]
+
+    def save(self) -> None:
+        """Save the current in-memory state."""
+        self._ensure_have_load_only()
+
+        for fname, parser in self._modified_parsers:
+            logger.info("Writing to %s", fname)
+
+            # Ensure directory exists.
+            ensure_dir(os.path.dirname(fname))
+
+            with open(fname, "w") as f:
+                parser.write(f)
+
+    #
+    # Private routines
+    #
+
+    def _ensure_have_load_only(self) -> None:
+        if self.load_only is None:
+            raise ConfigurationError("Needed a specific file to be modifying.")
+        logger.debug("Will be working with %s variant only", self.load_only)
+
+    @property
+    def _dictionary(self) -> Dict[str, Any]:
+        """A dictionary representing the loaded configuration."""
+        # NOTE: Dictionaries are not populated if not loaded. So, conditionals
+        #       are not needed here.
+        retval = {}
+
+        for variant in OVERRIDE_ORDER:
+            retval.update(self._config[variant])
+
+        return retval
+
+    def _load_config_files(self) -> None:
+        """Loads configuration from configuration files"""
+        config_files = dict(self.iter_config_files())
+        if config_files[kinds.ENV][0:1] == [os.devnull]:
+            logger.debug(
+                "Skipping loading configuration files due to "
+                "environment's PIP_CONFIG_FILE being os.devnull"
+            )
+            return
+
+        for variant, files in config_files.items():
+            for fname in files:
+                # If there's specific variant set in `load_only`, load only
+                # that variant, not the others.
+                if self.load_only is not None and variant != self.load_only:
+                    logger.debug("Skipping file '%s' (variant: %s)", fname, variant)
+                    continue
+
+                parser = self._load_file(variant, fname)
+
+                # Keeping track of the parsers used
+                self._parsers[variant].append((fname, parser))
+
+    def _load_file(self, variant: Kind, fname: str) -> RawConfigParser:
+        logger.verbose("For variant '%s', will try loading '%s'", variant, fname)
+        parser = self._construct_parser(fname)
+
+        for section in parser.sections():
+            items = parser.items(section)
+            self._config[variant].update(self._normalized_keys(section, items))
+
+        return parser
+
+    def _construct_parser(self, fname: str) -> RawConfigParser:
+        parser = configparser.RawConfigParser()
+        # If there is no such file, don't bother reading it but create the
+        # parser anyway, to hold the data.
+        # Doing this is useful when modifying and saving files, where we don't
+        # need to construct a parser.
+        if os.path.exists(fname):
+            locale_encoding = locale.getpreferredencoding(False)
+            try:
+                parser.read(fname, encoding=locale_encoding)
+            except UnicodeDecodeError:
+                # See https://github.com/pypa/pip/issues/4963
+                raise ConfigurationFileCouldNotBeLoaded(
+                    reason=f"contains invalid {locale_encoding} characters",
+                    fname=fname,
+                )
+            except configparser.Error as error:
+                # See https://github.com/pypa/pip/issues/4893
+                raise ConfigurationFileCouldNotBeLoaded(error=error)
+        return parser
+
+    def _load_environment_vars(self) -> None:
+        """Loads configuration from environment variables"""
+        self._config[kinds.ENV_VAR].update(
+            self._normalized_keys(":env:", self.get_environ_vars())
+        )
+
+    def _normalized_keys(
+        self, section: str, items: Iterable[Tuple[str, Any]]
+    ) -> Dict[str, Any]:
+        """Normalizes items to construct a dictionary with normalized keys.
+
+        This routine is where the names become keys and are made the same
+        regardless of source - configuration files or environment.
+        """
+        normalized = {}
+        for name, val in items:
+            key = section + "." + _normalize_name(name)
+            normalized[key] = val
+        return normalized
+
+    def get_environ_vars(self) -> Iterable[Tuple[str, str]]:
+        """Returns a generator with all environmental vars with prefix PIP_"""
+        for key, val in os.environ.items():
+            if key.startswith("PIP_"):
+                name = key[4:].lower()
+                if name not in ENV_NAMES_IGNORED:
+                    yield name, val
+
+    # XXX: This is patched in the tests.
+    def iter_config_files(self) -> Iterable[Tuple[Kind, List[str]]]:
+        """Yields variant and configuration files associated with it.
+
+        This should be treated like items of a dictionary.
+        """
+        # SMELL: Move the conditions out of this function
+
+        # environment variables have the lowest priority
+        config_file = os.environ.get("PIP_CONFIG_FILE", None)
+        if config_file is not None:
+            yield kinds.ENV, [config_file]
+        else:
+            yield kinds.ENV, []
+
+        config_files = get_configuration_files()
+
+        # at the base we have any global configuration
+        yield kinds.GLOBAL, config_files[kinds.GLOBAL]
+
+        # per-user configuration next
+        should_load_user_config = not self.isolated and not (
+            config_file and os.path.exists(config_file)
+        )
+        if should_load_user_config:
+            # The legacy config file is overridden by the new config file
+            yield kinds.USER, config_files[kinds.USER]
+
+        # finally, virtualenv (site) configuration, which trumps the others
+        yield kinds.SITE, config_files[kinds.SITE]
+
+    def get_values_in_config(self, variant: Kind) -> Dict[str, Any]:
+        """Get values present in a config file"""
+        return self._config[variant]
+
+    def _get_parser_to_modify(self) -> Tuple[str, RawConfigParser]:
+        # Determine which parser to modify
+        assert self.load_only
+        parsers = self._parsers[self.load_only]
+        if not parsers:
+            # This should not happen if everything works correctly.
+            raise ConfigurationError(
+                "Fatal Internal error [id=2]. Please report as a bug."
+            )
+
+        # Use the highest priority parser.
+        return parsers[-1]
+
+    # XXX: This is patched in the tests.
+    def _mark_as_modified(self, fname: str, parser: RawConfigParser) -> None:
+        file_parser_tuple = (fname, parser)
+        if file_parser_tuple not in self._modified_parsers:
+            self._modified_parsers.append(file_parser_tuple)
+
+    def __repr__(self) -> str:
+        return f"{self.__class__.__name__}({self._dictionary!r})"
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/distributions/__init__.py b/venv/lib/python3.9/site-packages/pip/_internal/distributions/__init__.py
new file mode 100644
index 0000000..9a89a83
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/distributions/__init__.py
@@ -0,0 +1,21 @@
+from pip._internal.distributions.base import AbstractDistribution
+from pip._internal.distributions.sdist import SourceDistribution
+from pip._internal.distributions.wheel import WheelDistribution
+from pip._internal.req.req_install import InstallRequirement
+
+
+def make_distribution_for_install_requirement(
+    install_req: InstallRequirement,
+) -> AbstractDistribution:
+    """Returns a Distribution for the given InstallRequirement"""
+    # Editable requirements will always be source distributions. They use the
+    # legacy logic until we create a modern standard for them.
+    if install_req.editable:
+        return SourceDistribution(install_req)
+
+    # If it's a wheel, it's a WheelDistribution
+    if install_req.is_wheel:
+        return WheelDistribution(install_req)
+
+    # Otherwise, a SourceDistribution
+    return SourceDistribution(install_req)
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..1b9956d
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/distributions/__pycache__/base.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/distributions/__pycache__/base.cpython-39.pyc
new file mode 100644
index 0000000..a5942cb
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/distributions/__pycache__/base.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-39.pyc
new file mode 100644
index 0000000..fb42ecb
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-39.pyc
new file mode 100644
index 0000000..fc84d41
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-39.pyc
new file mode 100644
index 0000000..e69479c
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/distributions/base.py b/venv/lib/python3.9/site-packages/pip/_internal/distributions/base.py
new file mode 100644
index 0000000..75ce2dc
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/distributions/base.py
@@ -0,0 +1,39 @@
+import abc
+
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata.base import BaseDistribution
+from pip._internal.req import InstallRequirement
+
+
+class AbstractDistribution(metaclass=abc.ABCMeta):
+    """A base class for handling installable artifacts.
+
+    The requirements for anything installable are as follows:
+
+     - we must be able to determine the requirement name
+       (or we can't correctly handle the non-upgrade case).
+
+     - for packages with setup requirements, we must also be able
+       to determine their requirements without installing additional
+       packages (for the same reason as run-time dependencies)
+
+     - we must be able to create a Distribution object exposing the
+       above metadata.
+    """
+
+    def __init__(self, req: InstallRequirement) -> None:
+        super().__init__()
+        self.req = req
+
+    @abc.abstractmethod
+    def get_metadata_distribution(self) -> BaseDistribution:
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    def prepare_distribution_metadata(
+        self,
+        finder: PackageFinder,
+        build_isolation: bool,
+        check_build_deps: bool,
+    ) -> None:
+        raise NotImplementedError()
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/distributions/installed.py b/venv/lib/python3.9/site-packages/pip/_internal/distributions/installed.py
new file mode 100644
index 0000000..edb38aa
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/distributions/installed.py
@@ -0,0 +1,23 @@
+from pip._internal.distributions.base import AbstractDistribution
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import BaseDistribution
+
+
+class InstalledDistribution(AbstractDistribution):
+    """Represents an installed package.
+
+    This does not need any preparation as the required information has already
+    been computed.
+    """
+
+    def get_metadata_distribution(self) -> BaseDistribution:
+        assert self.req.satisfied_by is not None, "not actually installed"
+        return self.req.satisfied_by
+
+    def prepare_distribution_metadata(
+        self,
+        finder: PackageFinder,
+        build_isolation: bool,
+        check_build_deps: bool,
+    ) -> None:
+        pass
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/distributions/sdist.py b/venv/lib/python3.9/site-packages/pip/_internal/distributions/sdist.py
new file mode 100644
index 0000000..4c25647
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/distributions/sdist.py
@@ -0,0 +1,150 @@
+import logging
+from typing import Iterable, Set, Tuple
+
+from pip._internal.build_env import BuildEnvironment
+from pip._internal.distributions.base import AbstractDistribution
+from pip._internal.exceptions import InstallationError
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import BaseDistribution
+from pip._internal.utils.subprocess import runner_with_spinner_message
+
+logger = logging.getLogger(__name__)
+
+
+class SourceDistribution(AbstractDistribution):
+    """Represents a source distribution.
+
+    The preparation step for these needs metadata for the packages to be
+    generated, either using PEP 517 or using the legacy `setup.py egg_info`.
+    """
+
+    def get_metadata_distribution(self) -> BaseDistribution:
+        return self.req.get_dist()
+
+    def prepare_distribution_metadata(
+        self,
+        finder: PackageFinder,
+        build_isolation: bool,
+        check_build_deps: bool,
+    ) -> None:
+        # Load pyproject.toml, to determine whether PEP 517 is to be used
+        self.req.load_pyproject_toml()
+
+        # Set up the build isolation, if this requirement should be isolated
+        should_isolate = self.req.use_pep517 and build_isolation
+        if should_isolate:
+            # Set up an isolated environment and install the build backend's
+            # static requirements in it.
+            self._prepare_build_backend(finder)
+            # Check that if the requirement is editable, it either supports PEP 660 or
+            # has a setup.py or a setup.cfg. This cannot be done earlier because we need
+            # to setup the build backend to verify it supports build_editable, nor can
+            # it be done later, because we want to avoid installing build requirements
+            # needlessly. Doing it here also works around setuptools generating
+            # UNKNOWN.egg-info when running get_requires_for_build_wheel on a directory
+            # without setup.py nor setup.cfg.
+            self.req.isolated_editable_sanity_check()
+            # Install the dynamic build requirements.
+            self._install_build_reqs(finder)
+        # Check if the current environment provides build dependencies
+        should_check_deps = self.req.use_pep517 and check_build_deps
+        if should_check_deps:
+            pyproject_requires = self.req.pyproject_requires
+            assert pyproject_requires is not None
+            conflicting, missing = self.req.build_env.check_requirements(
+                pyproject_requires
+            )
+            if conflicting:
+                self._raise_conflicts("the backend dependencies", conflicting)
+            if missing:
+                self._raise_missing_reqs(missing)
+        self.req.prepare_metadata()
+
+    def _prepare_build_backend(self, finder: PackageFinder) -> None:
+        # Isolate in a BuildEnvironment and install the build-time
+        # requirements.
+        pyproject_requires = self.req.pyproject_requires
+        assert pyproject_requires is not None
+
+        self.req.build_env = BuildEnvironment()
+        self.req.build_env.install_requirements(
+            finder, pyproject_requires, "overlay", kind="build dependencies"
+        )
+        conflicting, missing = self.req.build_env.check_requirements(
+            self.req.requirements_to_check
+        )
+        if conflicting:
+            self._raise_conflicts("PEP 517/518 supported requirements", conflicting)
+        if missing:
+            logger.warning(
+                "Missing build requirements in pyproject.toml for %s.",
+                self.req,
+            )
+            logger.warning(
+                "The project does not specify a build backend, and "
+                "pip cannot fall back to setuptools without %s.",
+                " and ".join(map(repr, sorted(missing))),
+            )
+
+    def _get_build_requires_wheel(self) -> Iterable[str]:
+        with self.req.build_env:
+            runner = runner_with_spinner_message("Getting requirements to build wheel")
+            backend = self.req.pep517_backend
+            assert backend is not None
+            with backend.subprocess_runner(runner):
+                return backend.get_requires_for_build_wheel()
+
+    def _get_build_requires_editable(self) -> Iterable[str]:
+        with self.req.build_env:
+            runner = runner_with_spinner_message(
+                "Getting requirements to build editable"
+            )
+            backend = self.req.pep517_backend
+            assert backend is not None
+            with backend.subprocess_runner(runner):
+                return backend.get_requires_for_build_editable()
+
+    def _install_build_reqs(self, finder: PackageFinder) -> None:
+        # Install any extra build dependencies that the backend requests.
+        # This must be done in a second pass, as the pyproject.toml
+        # dependencies must be installed before we can call the backend.
+        if (
+            self.req.editable
+            and self.req.permit_editable_wheels
+            and self.req.supports_pyproject_editable()
+        ):
+            build_reqs = self._get_build_requires_editable()
+        else:
+            build_reqs = self._get_build_requires_wheel()
+        conflicting, missing = self.req.build_env.check_requirements(build_reqs)
+        if conflicting:
+            self._raise_conflicts("the backend dependencies", conflicting)
+        self.req.build_env.install_requirements(
+            finder, missing, "normal", kind="backend dependencies"
+        )
+
+    def _raise_conflicts(
+        self, conflicting_with: str, conflicting_reqs: Set[Tuple[str, str]]
+    ) -> None:
+        format_string = (
+            "Some build dependencies for {requirement} "
+            "conflict with {conflicting_with}: {description}."
+        )
+        error_message = format_string.format(
+            requirement=self.req,
+            conflicting_with=conflicting_with,
+            description=", ".join(
+                f"{installed} is incompatible with {wanted}"
+                for installed, wanted in sorted(conflicting_reqs)
+            ),
+        )
+        raise InstallationError(error_message)
+
+    def _raise_missing_reqs(self, missing: Set[str]) -> None:
+        format_string = (
+            "Some build dependencies for {requirement} are missing: {missing}."
+        )
+        error_message = format_string.format(
+            requirement=self.req, missing=", ".join(map(repr, sorted(missing)))
+        )
+        raise InstallationError(error_message)
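
Note: the dynamic requirements fetched by _get_build_requires_wheel() come
from the PEP 517 get_requires_for_build_wheel hook. Roughly the same call,
sketched with the standalone pyproject-hooks package (pip vendors an
equivalent; the backend name assumes a setuptools project in the current
directory):

    from pyproject_hooks import BuildBackendHookCaller

    hooks = BuildBackendHookCaller(".", build_backend="setuptools.build_meta")
    # Extra requirements the backend wants before building a wheel, e.g.
    # ['wheel'] for older setuptools releases, [] for newer ones.
    print(hooks.get_requires_for_build_wheel())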
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/distributions/wheel.py b/venv/lib/python3.9/site-packages/pip/_internal/distributions/wheel.py
new file mode 100644
index 0000000..03aac77
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/distributions/wheel.py
@@ -0,0 +1,34 @@
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.distributions.base import AbstractDistribution
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import (
+    BaseDistribution,
+    FilesystemWheel,
+    get_wheel_distribution,
+)
+
+
+class WheelDistribution(AbstractDistribution):
+    """Represents a wheel distribution.
+
+    This does not need any preparation as wheels can be directly unpacked.
+    """
+
+    def get_metadata_distribution(self) -> BaseDistribution:
+        """Loads the metadata from the wheel file into memory and returns a
+        Distribution that uses it, not relying on the wheel file or
+        requirement.
+        """
+        assert self.req.local_file_path, "Set as part of preparation during download"
+        assert self.req.name, "Wheels are never unnamed"
+        wheel = FilesystemWheel(self.req.local_file_path)
+        return get_wheel_distribution(wheel, canonicalize_name(self.req.name))
+
+    def prepare_distribution_metadata(
+        self,
+        finder: PackageFinder,
+        build_isolation: bool,
+        check_build_deps: bool,
+    ) -> None:
+        pass
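
Note: get_wheel_distribution() ultimately reads .dist-info/METADATA straight
out of the zip archive, so a wheel never has to be installed or unpacked to
be inspected. A stdlib-only sketch (the filename is illustrative):

    import email
    import zipfile

    with zipfile.ZipFile("requests-2.28.1-py3-none-any.whl") as zf:
        meta_name = next(n for n in zf.namelist()
                         if n.endswith(".dist-info/METADATA"))
        # METADATA uses RFC 822-style headers, hence the email parser.
        meta = email.message_from_bytes(zf.read(meta_name))

    print(meta["Name"], meta["Version"])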
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/exceptions.py b/venv/lib/python3.9/site-packages/pip/_internal/exceptions.py
new file mode 100644
index 0000000..2ab1f59
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/exceptions.py
@@ -0,0 +1,660 @@
+"""Exceptions used throughout package.
+
+This module MUST NOT try to import from anything within `pip._internal` to
+operate. This is expected to be importable from any/all files within the
+subpackage and, thus, should not depend on them.
+"""
+
+import configparser
+import re
+from itertools import chain, groupby, repeat
+from typing import TYPE_CHECKING, Dict, List, Optional, Union
+
+from pip._vendor.requests.models import Request, Response
+from pip._vendor.rich.console import Console, ConsoleOptions, RenderResult
+from pip._vendor.rich.markup import escape
+from pip._vendor.rich.text import Text
+
+if TYPE_CHECKING:
+    from hashlib import _Hash
+    from typing import Literal
+
+    from pip._internal.metadata import BaseDistribution
+    from pip._internal.req.req_install import InstallRequirement
+
+
+#
+# Scaffolding
+#
+def _is_kebab_case(s: str) -> bool:
+    return re.match(r"^[a-z]+(-[a-z]+)*$", s) is not None
+
+
+def _prefix_with_indent(
+    s: Union[Text, str],
+    console: Console,
+    *,
+    prefix: str,
+    indent: str,
+) -> Text:
+    if isinstance(s, Text):
+        text = s
+    else:
+        text = console.render_str(s)
+
+    return console.render_str(prefix, overflow="ignore") + console.render_str(
+        f"\n{indent}", overflow="ignore"
+    ).join(text.split(allow_blank=True))
+
+
+class PipError(Exception):
+    """The base pip error."""
+
+
+class DiagnosticPipError(PipError):
+    """An error, that presents diagnostic information to the user.
+
+    This contains a bunch of logic, to enable pretty presentation of our error
+    messages. Each error gets a unique reference. Each error can also include
+    additional context, a hint and/or a note -- which are presented with the
+    main error message in a consistent style.
+
+    This is adapted from the error output styling in `sphinx-theme-builder`.
+    """
+
+    reference: str
+
+    def __init__(
+        self,
+        *,
+        kind: 'Literal["error", "warning"]' = "error",
+        reference: Optional[str] = None,
+        message: Union[str, Text],
+        context: Optional[Union[str, Text]],
+        hint_stmt: Optional[Union[str, Text]],
+        note_stmt: Optional[Union[str, Text]] = None,
+        link: Optional[str] = None,
+    ) -> None:
+        # Ensure a proper reference is provided.
+        if reference is None:
+            assert hasattr(self, "reference"), "error reference not provided!"
+            reference = self.reference
+        assert _is_kebab_case(reference), "error reference must be kebab-case!"
+
+        self.kind = kind
+        self.reference = reference
+
+        self.message = message
+        self.context = context
+
+        self.note_stmt = note_stmt
+        self.hint_stmt = hint_stmt
+
+        self.link = link
+
+        super().__init__(f"<{self.__class__.__name__}: {self.reference}>")
+
+    def __repr__(self) -> str:
+        return (
+            f"<{self.__class__.__name__}("
+            f"reference={self.reference!r}, "
+            f"message={self.message!r}, "
+            f"context={self.context!r}, "
+            f"note_stmt={self.note_stmt!r}, "
+            f"hint_stmt={self.hint_stmt!r}"
+            ")>"
+        )
+
+    def __rich_console__(
+        self,
+        console: Console,
+        options: ConsoleOptions,
+    ) -> RenderResult:
+        colour = "red" if self.kind == "error" else "yellow"
+
+        yield f"[{colour} bold]{self.kind}[/]: [bold]{self.reference}[/]"
+        yield ""
+
+        if not options.ascii_only:
+            # Present the main message, with relevant context indented.
+            if self.context is not None:
+                yield _prefix_with_indent(
+                    self.message,
+                    console,
+                    prefix=f"[{colour}]×[/] ",
+                    indent=f"[{colour}]│[/] ",
+                )
+                yield _prefix_with_indent(
+                    self.context,
+                    console,
+                    prefix=f"[{colour}]╰─>[/] ",
+                    indent=f"[{colour}]   [/] ",
+                )
+            else:
+                yield _prefix_with_indent(
+                    self.message,
+                    console,
+                    prefix="[red]×[/] ",
+                    indent="  ",
+                )
+        else:
+            yield self.message
+            if self.context is not None:
+                yield ""
+                yield self.context
+
+        if self.note_stmt is not None or self.hint_stmt is not None:
+            yield ""
+
+        if self.note_stmt is not None:
+            yield _prefix_with_indent(
+                self.note_stmt,
+                console,
+                prefix="[magenta bold]note[/]: ",
+                indent="      ",
+            )
+        if self.hint_stmt is not None:
+            yield _prefix_with_indent(
+                self.hint_stmt,
+                console,
+                prefix="[cyan bold]hint[/]: ",
+                indent="      ",
+            )
+
+        if self.link is not None:
+            yield ""
+            yield f"Link: {self.link}"
+
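+# Illustrative only: with the rendering above, a subclass such as
+# MissingPyProjectBuildRequires (defined below) prints roughly the following
+# on a Unicode-capable terminal:
+#
+#   error: missing-pyproject-build-system-requires
+#
+#   × Can not process somepkg
+#   ╰─> This package has an invalid pyproject.toml file.
+#       The [build-system] table is missing the mandatory `requires` key.
+#
+#   note: This is an issue with the package mentioned above, not pip.
+#   hint: See PEP 518 for the detailed specification.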
+
+#
+# Actual Errors
+#
+class ConfigurationError(PipError):
+    """General exception in configuration"""
+
+
+class InstallationError(PipError):
+    """General exception during installation"""
+
+
+class UninstallationError(PipError):
+    """General exception during uninstallation"""
+
+
+class MissingPyProjectBuildRequires(DiagnosticPipError):
+    """Raised when pyproject.toml has `build-system`, but no `build-system.requires`."""
+
+    reference = "missing-pyproject-build-system-requires"
+
+    def __init__(self, *, package: str) -> None:
+        super().__init__(
+            message=f"Can not process {escape(package)}",
+            context=Text(
+                "This package has an invalid pyproject.toml file.\n"
+                "The [build-system] table is missing the mandatory `requires` key."
+            ),
+            note_stmt="This is an issue with the package mentioned above, not pip.",
+            hint_stmt=Text("See PEP 518 for the detailed specification."),
+        )
+
+
+class InvalidPyProjectBuildRequires(DiagnosticPipError):
+    """Raised when pyproject.toml an invalid `build-system.requires`."""
+
+    reference = "invalid-pyproject-build-system-requires"
+
+    def __init__(self, *, package: str, reason: str) -> None:
+        super().__init__(
+            message=f"Can not process {escape(package)}",
+            context=Text(
+                "This package has an invalid `build-system.requires` key in "
+                f"pyproject.toml.\n{reason}"
+            ),
+            note_stmt="This is an issue with the package mentioned above, not pip.",
+            hint_stmt=Text("See PEP 518 for the detailed specification."),
+        )
+
+
+class NoneMetadataError(PipError):
+    """Raised when accessing a Distribution's "METADATA" or "PKG-INFO".
+
+    This signifies an inconsistency, when the Distribution claims to have
+    the metadata file (if not, raise ``FileNotFoundError`` instead), but is
+    not actually able to produce its content. This may be due to permission
+    errors.
+    """
+
+    def __init__(
+        self,
+        dist: "BaseDistribution",
+        metadata_name: str,
+    ) -> None:
+        """
+        :param dist: A Distribution object.
+        :param metadata_name: The name of the metadata being accessed
+            (can be "METADATA" or "PKG-INFO").
+        """
+        self.dist = dist
+        self.metadata_name = metadata_name
+
+    def __str__(self) -> str:
+        # Use `dist` in the error message because its stringification
+        # includes more information, like the version and location.
+        return "None {} metadata found for distribution: {}".format(
+            self.metadata_name,
+            self.dist,
+        )
+
+
+class UserInstallationInvalid(InstallationError):
+    """A --user install is requested on an environment without user site."""
+
+    def __str__(self) -> str:
+        return "User base directory is not specified"
+
+
+class InvalidSchemeCombination(InstallationError):
+    def __str__(self) -> str:
+        before = ", ".join(str(a) for a in self.args[:-1])
+        return f"Cannot set {before} and {self.args[-1]} together"
+
+
+class DistributionNotFound(InstallationError):
+    """Raised when a distribution cannot be found to satisfy a requirement"""
+
+
+class RequirementsFileParseError(InstallationError):
+    """Raised when a general error occurs parsing a requirements file line."""
+
+
+class BestVersionAlreadyInstalled(PipError):
+    """Raised when the most up-to-date version of a package is already
+    installed."""
+
+
+class BadCommand(PipError):
+    """Raised when virtualenv or a command is not found"""
+
+
+class CommandError(PipError):
+    """Raised when there is an error in command-line arguments"""
+
+
+class PreviousBuildDirError(PipError):
+    """Raised when there's a previous conflicting build directory"""
+
+
+class NetworkConnectionError(PipError):
+    """HTTP connection error"""
+
+    def __init__(
+        self,
+        error_msg: str,
+        response: Optional[Response] = None,
+        request: Optional[Request] = None,
+    ) -> None:
+        """
+        Initialize NetworkConnectionError with `request` and `response`
+        objects.
+        """
+        self.response = response
+        self.request = request
+        self.error_msg = error_msg
+        if (
+            self.response is not None
+            and not self.request
+            and hasattr(response, "request")
+        ):
+            self.request = self.response.request
+        super().__init__(error_msg, response, request)
+
+    def __str__(self) -> str:
+        return str(self.error_msg)
+
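+
+# Illustrative sketch (not part of pip): NetworkConnectionError is usable
+# without real network objects, since both `response` and `request` are
+# optional, and __str__ returns the bare error message.
+def _demo_network_connection_error() -> None:
+    err = NetworkConnectionError("503 Server Error: Service Unavailable")
+    assert str(err) == "503 Server Error: Service Unavailable"
+    assert err.response is None and err.request is None
+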
+
+class InvalidWheelFilename(InstallationError):
+    """Invalid wheel filename."""
+
+
+class UnsupportedWheel(InstallationError):
+    """Unsupported wheel."""
+
+
+class InvalidWheel(InstallationError):
+    """Invalid (e.g. corrupt) wheel."""
+
+    def __init__(self, location: str, name: str):
+        self.location = location
+        self.name = name
+
+    def __str__(self) -> str:
+        return f"Wheel '{self.name}' located at {self.location} is invalid."
+
+
+class MetadataInconsistent(InstallationError):
+    """Built metadata contains inconsistent information.
+
+    This is raised when the metadata contains values (e.g. name and version)
+    that do not match the information previously obtained from sdist filename,
+    user-supplied ``#egg=`` value, or an install requirement name.
+    """
+
+    def __init__(
+        self, ireq: "InstallRequirement", field: str, f_val: str, m_val: str
+    ) -> None:
+        self.ireq = ireq
+        self.field = field
+        self.f_val = f_val
+        self.m_val = m_val
+
+    def __str__(self) -> str:
+        return (
+            f"Requested {self.ireq} has inconsistent {self.field}: "
+            f"expected {self.f_val!r}, but metadata has {self.m_val!r}"
+        )
+
+
+class LegacyInstallFailure(DiagnosticPipError):
+    """Error occurred while executing `setup.py install`"""
+
+    reference = "legacy-install-failure"
+
+    def __init__(self, package_details: str) -> None:
+        super().__init__(
+            message="Encountered error while trying to install package.",
+            context=package_details,
+            hint_stmt="See above for output from the failure.",
+            note_stmt="This is an issue with the package mentioned above, not pip.",
+        )
+
+
+class InstallationSubprocessError(DiagnosticPipError, InstallationError):
+    """A subprocess call failed."""
+
+    reference = "subprocess-exited-with-error"
+
+    def __init__(
+        self,
+        *,
+        command_description: str,
+        exit_code: int,
+        output_lines: Optional[List[str]],
+    ) -> None:
+        if output_lines is None:
+            output_prompt = Text("See above for output.")
+        else:
+            output_prompt = (
+                Text.from_markup(f"[red][{len(output_lines)} lines of output][/]\n")
+                + Text("".join(output_lines))
+                + Text.from_markup(R"[red]\[end of output][/]")
+            )
+
+        super().__init__(
+            message=(
+                f"[green]{escape(command_description)}[/] did not run successfully.\n"
+                f"exit code: {exit_code}"
+            ),
+            context=output_prompt,
+            hint_stmt=None,
+            note_stmt=(
+                "This error originates from a subprocess, and is likely not a "
+                "problem with pip."
+            ),
+        )
+
+        self.command_description = command_description
+        self.exit_code = exit_code
+
+    def __str__(self) -> str:
+        return f"{self.command_description} exited with {self.exit_code}"
+
+
+class MetadataGenerationFailed(InstallationSubprocessError, InstallationError):
+    reference = "metadata-generation-failed"
+
+    def __init__(
+        self,
+        *,
+        package_details: str,
+    ) -> None:
+        super(InstallationSubprocessError, self).__init__(
+            message="Encountered error while generating package metadata.",
+            context=escape(package_details),
+            hint_stmt="See above for details.",
+            note_stmt="This is an issue with the package mentioned above, not pip.",
+        )
+
+    def __str__(self) -> str:
+        return "metadata generation failed"
+
+
+class HashErrors(InstallationError):
+    """Multiple HashError instances rolled into one for reporting"""
+
+    def __init__(self) -> None:
+        self.errors: List["HashError"] = []
+
+    def append(self, error: "HashError") -> None:
+        self.errors.append(error)
+
+    def __str__(self) -> str:
+        lines = []
+        self.errors.sort(key=lambda e: e.order)
+        for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__):
+            lines.append(cls.head)
+            lines.extend(e.body() for e in errors_of_cls)
+        if lines:
+            return "\n".join(lines)
+        return ""
+
+    def __bool__(self) -> bool:
+        return bool(self.errors)
+
+
+class HashError(InstallationError):
+    """
+    A failure to verify a package against known-good hashes
+
+    :cvar order: An int sorting hash exception classes by difficulty of
+        recovery (lower being harder), so the user doesn't bother fretting
+        about unpinned packages when they have deeper issues, like VCS
+        dependencies, to deal with. Also keeps error reports in a
+        deterministic order.
+    :cvar head: A section heading for display above potentially many
+        exceptions of this kind
+    :ivar req: The InstallRequirement that triggered this error. This is
+        pasted on after the exception is instantiated, because it's not
+        typically available earlier.
+
+    """
+
+    req: Optional["InstallRequirement"] = None
+    head = ""
+    order: int = -1
+
+    def body(self) -> str:
+        """Return a summary of me for display under the heading.
+
+        This default implementation simply prints a description of the
+        triggering requirement (``self.req``, whose link has already been
+        populated by the resolver's _populate_link()).
+
+        """
+        return f"    {self._requirement_name()}"
+
+    def __str__(self) -> str:
+        return f"{self.head}\n{self.body()}"
+
+    def _requirement_name(self) -> str:
+        """Return a description of the requirement that triggered me.
+
+        This default implementation returns the long description of the req,
+        with line numbers.
+
+        """
+        return str(self.req) if self.req else "unknown package"
+
+
+class VcsHashUnsupported(HashError):
+    """A hash was provided for a version-control-system-based requirement, but
+    we don't have a method for hashing those."""
+
+    order = 0
+    head = (
+        "Can't verify hashes for these requirements because we don't "
+        "have a way to hash version control repositories:"
+    )
+
+
+class DirectoryUrlHashUnsupported(HashError):
+    """A hash was provided for a version-control-system-based requirement, but
+    we don't have a method for hashing those."""
+
+    order = 1
+    head = (
+        "Can't verify hashes for these file:// requirements because they "
+        "point to directories:"
+    )
+
+
+class HashMissing(HashError):
+    """A hash was needed for a requirement but is absent."""
+
+    order = 2
+    head = (
+        "Hashes are required in --require-hashes mode, but they are "
+        "missing from some requirements. Here is a list of those "
+        "requirements along with the hashes their downloaded archives "
+        "actually had. Add lines like these to your requirements files to "
+        "prevent tampering. (If you did not enable --require-hashes "
+        "manually, note that it turns on automatically when any package "
+        "has a hash.)"
+    )
+
+    def __init__(self, gotten_hash: str) -> None:
+        """
+        :param gotten_hash: The hash of the (possibly malicious) archive we
+            just downloaded
+        """
+        self.gotten_hash = gotten_hash
+
+    def body(self) -> str:
+        # Dodge circular import.
+        from pip._internal.utils.hashes import FAVORITE_HASH
+
+        package = None
+        if self.req:
+            # In the case of URL-based requirements, display the original URL
+            # seen in the requirements file rather than the package name,
+            # so the output can be directly copied into the requirements file.
+            package = (
+                self.req.original_link
+                if self.req.original_link
+                # In case someone feeds something downright stupid
+                # to InstallRequirement's constructor.
+                else getattr(self.req, "req", None)
+            )
+        return "    {} --hash={}:{}".format(
+            package or "unknown package", FAVORITE_HASH, self.gotten_hash
+        )
+
+
+class HashUnpinned(HashError):
+    """A requirement had a hash specified but was not pinned to a specific
+    version."""
+
+    order = 3
+    head = (
+        "In --require-hashes mode, all requirements must have their "
+        "versions pinned with ==. These do not:"
+    )
+
+
+class HashMismatch(HashError):
+    """
+    Distribution file hash values don't match.
+
+    :ivar package_name: The name of the package that triggered the hash
+        mismatch. Feel free to write to this after the exception is raised to
+        improve its error message.
+
+    """
+
+    order = 4
+    head = (
+        "THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS "
+        "FILE. If you have updated the package versions, please update "
+        "the hashes. Otherwise, examine the package contents carefully; "
+        "someone may have tampered with them."
+    )
+
+    def __init__(self, allowed: Dict[str, List[str]], gots: Dict[str, "_Hash"]) -> None:
+        """
+        :param allowed: A dict of algorithm names pointing to lists of allowed
+            hex digests
+        :param gots: A dict of algorithm names pointing to hashes we
+            actually got from the files under suspicion
+        """
+        self.allowed = allowed
+        self.gots = gots
+
+    def body(self) -> str:
+        return "    {}:\n{}".format(self._requirement_name(), self._hash_comparison())
+
+    def _hash_comparison(self) -> str:
+        """
+        Return a comparison of actual and expected hash values.
+
+        Example::
+
+               Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde
+                            or 123451234512345123451234512345123451234512345
+                    Got        bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef
+
+        """
+
+        def hash_then_or(hash_name: str) -> "chain[str]":
+            # For now, all the decent hashes have 6-char names, so we can get
+            # away with hard-coding space literals.
+            return chain([hash_name], repeat("    or"))
+
+        lines: List[str] = []
+        for hash_name, expecteds in self.allowed.items():
+            prefix = hash_then_or(hash_name)
+            lines.extend(
+                ("        Expected {} {}".format(next(prefix), e)) for e in expecteds
+            )
+            lines.append(
+                "             Got        {}\n".format(self.gots[hash_name].hexdigest())
+            )
+        return "\n".join(lines)
+
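+
+# Illustrative sketch (not part of pip): how HashErrors orders and groups the
+# classes defined above. VcsHashUnsupported (order 0) sorts ahead of
+# HashMissing (order 2), and each class's `head` is emitted once above the
+# bodies of its instances.
+def _demo_hash_errors_report() -> str:
+    errors = HashErrors()
+    errors.append(HashMissing(gotten_hash="0" * 64))
+    errors.append(VcsHashUnsupported())
+    # The VCS heading comes first, followed by the --require-hashes heading.
+    return str(errors)
+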
+
+class UnsupportedPythonVersion(InstallationError):
+    """Unsupported python version according to Requires-Python package
+    metadata."""
+
+
+class ConfigurationFileCouldNotBeLoaded(ConfigurationError):
+    """When there are errors while loading a configuration file"""
+
+    def __init__(
+        self,
+        reason: str = "could not be loaded",
+        fname: Optional[str] = None,
+        error: Optional[configparser.Error] = None,
+    ) -> None:
+        super().__init__(error)
+        self.reason = reason
+        self.fname = fname
+        self.error = error
+
+    def __str__(self) -> str:
+        if self.fname is not None:
+            message_part = f" in {self.fname}."
+        else:
+            assert self.error is not None
+            message_part = f".\n{self.error}\n"
+        return f"Configuration file {self.reason}{message_part}"
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/index/__init__.py b/venv/lib/python3.9/site-packages/pip/_internal/index/__init__.py
new file mode 100644
index 0000000..7a17b7b
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/index/__init__.py
@@ -0,0 +1,2 @@
+"""Index interaction code
+"""
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/index/__pycache__/__init__.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/index/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..75a4cc8
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/index/__pycache__/__init__.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/index/__pycache__/collector.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/index/__pycache__/collector.cpython-39.pyc
new file mode 100644
index 0000000..dbcc404
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/index/__pycache__/collector.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-39.pyc
new file mode 100644
index 0000000..84b9209
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/index/__pycache__/sources.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/index/__pycache__/sources.cpython-39.pyc
new file mode 100644
index 0000000..708ac58
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/index/__pycache__/sources.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/index/collector.py b/venv/lib/python3.9/site-packages/pip/_internal/index/collector.py
new file mode 100644
index 0000000..0120610
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/index/collector.py
@@ -0,0 +1,505 @@
+"""
+The main purpose of this module is to expose LinkCollector.collect_sources().
+"""
+
+import collections
+import email.message
+import functools
+import itertools
+import json
+import logging
+import os
+import urllib.parse
+import urllib.request
+from html.parser import HTMLParser
+from optparse import Values
+from typing import (
+    TYPE_CHECKING,
+    Callable,
+    Dict,
+    Iterable,
+    List,
+    MutableMapping,
+    NamedTuple,
+    Optional,
+    Sequence,
+    Tuple,
+    Union,
+)
+
+from pip._vendor import requests
+from pip._vendor.requests import Response
+from pip._vendor.requests.exceptions import RetryError, SSLError
+
+from pip._internal.exceptions import NetworkConnectionError
+from pip._internal.models.link import Link
+from pip._internal.models.search_scope import SearchScope
+from pip._internal.network.session import PipSession
+from pip._internal.network.utils import raise_for_status
+from pip._internal.utils.filetypes import is_archive_file
+from pip._internal.utils.misc import redact_auth_from_url
+from pip._internal.vcs import vcs
+
+from .sources import CandidatesFromPage, LinkSource, build_source
+
+if TYPE_CHECKING:
+    from typing import Protocol
+else:
+    Protocol = object
+
+logger = logging.getLogger(__name__)
+
+ResponseHeaders = MutableMapping[str, str]
+
+
+def _match_vcs_scheme(url: str) -> Optional[str]:
+    """Look for VCS schemes in the URL.
+
+    Returns the matched VCS scheme, or None if there's no match.
+    """
+    for scheme in vcs.schemes:
+        if url.lower().startswith(scheme) and url[len(scheme)] in "+:":
+            return scheme
+    return None
+
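+
+# Illustrative sketch (not part of pip): the rule above requires the scheme
+# name to be followed by "+" or ":", so plain https URLs never match. This
+# assumes the vcs registry's scheme list includes the bare "git" name.
+def _demo_match_vcs_scheme() -> None:
+    assert _match_vcs_scheme("git+https://example.com/repo.git") == "git"
+    assert _match_vcs_scheme("https://example.com/pkg-1.0.tar.gz") is None
+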
+
+class _NotAPIContent(Exception):
+    def __init__(self, content_type: str, request_desc: str) -> None:
+        super().__init__(content_type, request_desc)
+        self.content_type = content_type
+        self.request_desc = request_desc
+
+
+def _ensure_api_header(response: Response) -> None:
+    """
+    Check the Content-Type header to ensure the response contains a Simple
+    API Response.
+
+    Raises `_NotAPIContent` if the Content-Type is not one of the supported
+    Simple API content types.
+    """
+    content_type = response.headers.get("Content-Type", "Unknown")
+
+    content_type_l = content_type.lower()
+    if content_type_l.startswith(
+        (
+            "text/html",
+            "application/vnd.pypi.simple.v1+html",
+            "application/vnd.pypi.simple.v1+json",
+        )
+    ):
+        return
+
+    raise _NotAPIContent(content_type, response.request.method)
+
+
+class _NotHTTP(Exception):
+    pass
+
+
+def _ensure_api_response(url: str, session: PipSession) -> None:
+    """
+    Send a HEAD request to the URL, and ensure the response contains a Simple
+    API response.
+
+    Raises `_NotHTTP` if the URL is not available for a HEAD request, or
+    `_NotAPIContent` if the content type is not a valid content type.
+    """
+    scheme, netloc, path, query, fragment = urllib.parse.urlsplit(url)
+    if scheme not in {"http", "https"}:
+        raise _NotHTTP()
+
+    resp = session.head(url, allow_redirects=True)
+    raise_for_status(resp)
+
+    _ensure_api_header(resp)
+
+
+def _get_simple_response(url: str, session: PipSession) -> Response:
+    """Access an Simple API response with GET, and return the response.
+
+    This consists of three parts:
+
+    1. If the URL looks suspiciously like an archive, send a HEAD first to
+       check the Content-Type is HTML or Simple API, to avoid downloading a
+       large file. Raise `_NotHTTP` if the URL is not available for a HEAD
+       request, or `_NotAPIContent` if it is not HTML or a Simple API.
+    2. Actually perform the request. Raise HTTP exceptions on network failures.
+    3. Check the Content-Type header to make sure we got a Simple API response,
+       and raise `_NotAPIContent` otherwise.
+    """
+    if is_archive_file(Link(url).filename):
+        _ensure_api_response(url, session=session)
+
+    logger.debug("Getting page %s", redact_auth_from_url(url))
+
+    resp = session.get(
+        url,
+        headers={
+            "Accept": ", ".join(
+                [
+                    "application/vnd.pypi.simple.v1+json",
+                    "application/vnd.pypi.simple.v1+html; q=0.1",
+                    "text/html; q=0.01",
+                ]
+            ),
+            # We don't want to blindly return cached data for
+            # /simple/, because authors generally expect that
+            # twine upload && pip install will function, but if
+            # they've done a pip install in the last ~10 minutes
+            # it won't. Thus, by setting this to zero we will not
+            # blindly use any cached data. The benefit of using
+            # max-age=0 instead of no-cache is that we still
+            # support conditional requests, so we still minimize
+            # traffic sent in cases where the page hasn't changed
+            # at all; we just always incur the round trip for the
+            # conditional GET now instead of only once per 10
+            # minutes.
+            # For more information, please see pypa/pip#5670.
+            "Cache-Control": "max-age=0",
+        },
+    )
+    raise_for_status(resp)
+
+    # The check for archives above only works if the URL ends with
+    # something that looks like an archive. However, that is not a
+    # requirement of a URL. Unless we issue a HEAD request on every
+    # URL, we cannot know ahead of time for sure if something is a
+    # Simple API response or not; we can, however, check after we've
+    # downloaded it.
+    _ensure_api_header(resp)
+
+    logger.debug(
+        "Fetched page %s as %s",
+        redact_auth_from_url(url),
+        resp.headers.get("Content-Type", "Unknown"),
+    )
+
+    return resp
+
+
+def _get_encoding_from_headers(headers: ResponseHeaders) -> Optional[str]:
+    """Determine if we have any encoding information in our headers."""
+    if headers and "Content-Type" in headers:
+        m = email.message.Message()
+        m["content-type"] = headers["Content-Type"]
+        charset = m.get_param("charset")
+        if charset:
+            return str(charset)
+    return None
+
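+
+# Illustrative sketch (not part of pip): the helper above leans on the stdlib
+# email parser to extract the charset parameter from a Content-Type value.
+def _demo_encoding_from_headers() -> None:
+    headers = {"Content-Type": "text/html; charset=ISO-8859-1"}
+    assert _get_encoding_from_headers(headers) == "ISO-8859-1"
+    assert _get_encoding_from_headers({}) is None
+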
+
+class CacheablePageContent:
+    def __init__(self, page: "IndexContent") -> None:
+        assert page.cache_link_parsing
+        self.page = page
+
+    def __eq__(self, other: object) -> bool:
+        return isinstance(other, type(self)) and self.page.url == other.page.url
+
+    def __hash__(self) -> int:
+        return hash(self.page.url)
+
+
+class ParseLinks(Protocol):
+    def __call__(self, page: "IndexContent") -> Iterable[Link]:
+        ...
+
+
+def with_cached_index_content(fn: ParseLinks) -> ParseLinks:
+    """
+    Given a function that parses an Iterable[Link] from an IndexContent, cache the
+    function's result (keyed by CacheablePageContent), unless the IndexContent
+    `page` has `page.cache_link_parsing == False`.
+    """
+
+    @functools.lru_cache(maxsize=None)
+    def wrapper(cacheable_page: CacheablePageContent) -> List[Link]:
+        return list(fn(cacheable_page.page))
+
+    @functools.wraps(fn)
+    def wrapper_wrapper(page: "IndexContent") -> List[Link]:
+        if page.cache_link_parsing:
+            return wrapper(CacheablePageContent(page))
+        return list(fn(page))
+
+    return wrapper_wrapper
+
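+
+# Illustrative sketch (not part of pip): the decorator above caches per page
+# URL through the hashable CacheablePageContent wrapper, so a second call
+# with an equal page is served from the lru_cache. IndexContent is defined
+# later in this module, so this demo must only be called after import.
+def _demo_with_cached_index_content() -> None:
+    calls: List[str] = []
+
+    @with_cached_index_content
+    def fake_parse(page: "IndexContent") -> Iterable[Link]:
+        calls.append(page.url)
+        return iter([])
+
+    page = IndexContent(b"", "text/html", None, "https://example.com/simple/")
+    fake_parse(page)
+    fake_parse(page)
+    assert calls == ["https://example.com/simple/"]  # parsed only once
+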
+
+@with_cached_index_content
+def parse_links(page: "IndexContent") -> Iterable[Link]:
+    """
+    Parse a Simple API's index content, and yield its file entries (JSON) or
+    anchor elements (HTML) as Link objects.
+    """
+
+    content_type_l = page.content_type.lower()
+    if content_type_l.startswith("application/vnd.pypi.simple.v1+json"):
+        data = json.loads(page.content)
+        for file in data.get("files", []):
+            link = Link.from_json(file, page.url)
+            if link is None:
+                continue
+            yield link
+        return
+
+    parser = HTMLLinkParser(page.url)
+    encoding = page.encoding or "utf-8"
+    parser.feed(page.content.decode(encoding))
+
+    url = page.url
+    base_url = parser.base_url or url
+    for anchor in parser.anchors:
+        link = Link.from_element(anchor, page_url=url, base_url=base_url)
+        if link is None:
+            continue
+        yield link
+
+
+class IndexContent:
+    """Represents one response (or page), along with its URL"""
+
+    def __init__(
+        self,
+        content: bytes,
+        content_type: str,
+        encoding: Optional[str],
+        url: str,
+        cache_link_parsing: bool = True,
+    ) -> None:
+        """
+        :param encoding: the encoding to decode the given content.
+        :param url: the URL from which the HTML was downloaded.
+        :param cache_link_parsing: whether links parsed from this page's url
+                                   should be cached. PyPI index urls should
+                                   have this set to False, for example.
+        """
+        self.content = content
+        self.content_type = content_type
+        self.encoding = encoding
+        self.url = url
+        self.cache_link_parsing = cache_link_parsing
+
+    def __str__(self) -> str:
+        return redact_auth_from_url(self.url)
+
+
+class HTMLLinkParser(HTMLParser):
+    """
+    HTMLParser that keeps the first base HREF and a list of all anchor
+    elements' attributes.
+    """
+
+    def __init__(self, url: str) -> None:
+        super().__init__(convert_charrefs=True)
+
+        self.url: str = url
+        self.base_url: Optional[str] = None
+        self.anchors: List[Dict[str, Optional[str]]] = []
+
+    def handle_starttag(self, tag: str, attrs: List[Tuple[str, Optional[str]]]) -> None:
+        if tag == "base" and self.base_url is None:
+            href = self.get_href(attrs)
+            if href is not None:
+                self.base_url = href
+        elif tag == "a":
+            self.anchors.append(dict(attrs))
+
+    def get_href(self, attrs: List[Tuple[str, Optional[str]]]) -> Optional[str]:
+        for name, value in attrs:
+            if name == "href":
+                return value
+        return None
+
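+
+# Illustrative sketch (not part of pip): feeding a minimal index page into
+# HTMLLinkParser. The first <base href> wins, and each anchor's attributes
+# are collected for later conversion into Link objects by parse_links().
+def _demo_html_link_parser() -> None:
+    parser = HTMLLinkParser(url="https://example.com/simple/pkg/")
+    parser.feed(
+        '<html><head><base href="https://files.example.com/"></head>'
+        '<body><a href="pkg-1.0.tar.gz">pkg-1.0.tar.gz</a></body></html>'
+    )
+    assert parser.base_url == "https://files.example.com/"
+    assert parser.anchors == [{"href": "pkg-1.0.tar.gz"}]
+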
+
+def _handle_get_simple_fail(
+    link: Link,
+    reason: Union[str, Exception],
+    meth: Optional[Callable[..., None]] = None,
+) -> None:
+    if meth is None:
+        meth = logger.debug
+    meth("Could not fetch URL %s: %s - skipping", link, reason)
+
+
+def _make_index_content(
+    response: Response, cache_link_parsing: bool = True
+) -> IndexContent:
+    encoding = _get_encoding_from_headers(response.headers)
+    return IndexContent(
+        response.content,
+        response.headers["Content-Type"],
+        encoding=encoding,
+        url=response.url,
+        cache_link_parsing=cache_link_parsing,
+    )
+
+
+def _get_index_content(link: Link, *, session: PipSession) -> Optional["IndexContent"]:
+    url = link.url.split("#", 1)[0]
+
+    # Check for VCS schemes that do not support lookup as web pages.
+    vcs_scheme = _match_vcs_scheme(url)
+    if vcs_scheme:
+        logger.warning(
+            "Cannot look at %s URL %s because it does not support lookup as web pages.",
+            vcs_scheme,
+            link,
+        )
+        return None
+
+    # Tack index.html onto file:// URLs that point to directories
+    scheme, _, path, _, _, _ = urllib.parse.urlparse(url)
+    if scheme == "file" and os.path.isdir(urllib.request.url2pathname(path)):
+        # add trailing slash if not present so urljoin doesn't trim
+        # final segment
+        if not url.endswith("/"):
+            url += "/"
+        # TODO: In the future, it would be nice if pip supported PEP 691
+        #       style responses in file:// URLs; however, there's no
+        #       standard file extension for application/vnd.pypi.simple.v1+json
+        #       so we'll need to come up with something on our own.
+        url = urllib.parse.urljoin(url, "index.html")
+        logger.debug(" file: URL is directory, getting %s", url)
+
+    try:
+        resp = _get_simple_response(url, session=session)
+    except _NotHTTP:
+        logger.warning(
+            "Skipping page %s because it looks like an archive, and cannot "
+            "be checked by a HTTP HEAD request.",
+            link,
+        )
+    except _NotAPIContent as exc:
+        logger.warning(
+            "Skipping page %s because the %s request got Content-Type: %s. "
+            "The only supported Content-Types are application/vnd.pypi.simple.v1+json, "
+            "application/vnd.pypi.simple.v1+html, and text/html",
+            link,
+            exc.request_desc,
+            exc.content_type,
+        )
+    except NetworkConnectionError as exc:
+        _handle_get_simple_fail(link, exc)
+    except RetryError as exc:
+        _handle_get_simple_fail(link, exc)
+    except SSLError as exc:
+        reason = "There was a problem confirming the ssl certificate: "
+        reason += str(exc)
+        _handle_get_simple_fail(link, reason, meth=logger.info)
+    except requests.ConnectionError as exc:
+        _handle_get_simple_fail(link, f"connection error: {exc}")
+    except requests.Timeout:
+        _handle_get_simple_fail(link, "timed out")
+    else:
+        return _make_index_content(resp, cache_link_parsing=link.cache_link_parsing)
+    return None
+
+
+class CollectedSources(NamedTuple):
+    find_links: Sequence[Optional[LinkSource]]
+    index_urls: Sequence[Optional[LinkSource]]
+
+
+class LinkCollector:
+
+    """
+    Responsible for collecting Link objects from all configured locations,
+    making network requests as needed.
+
+    The class's main method is its collect_sources() method.
+    """
+
+    def __init__(
+        self,
+        session: PipSession,
+        search_scope: SearchScope,
+    ) -> None:
+        self.search_scope = search_scope
+        self.session = session
+
+    @classmethod
+    def create(
+        cls,
+        session: PipSession,
+        options: Values,
+        suppress_no_index: bool = False,
+    ) -> "LinkCollector":
+        """
+        :param session: The Session to use to make requests.
+        :param suppress_no_index: Whether to ignore the --no-index option
+            when constructing the SearchScope object.
+        """
+        index_urls = [options.index_url] + options.extra_index_urls
+        if options.no_index and not suppress_no_index:
+            logger.debug(
+                "Ignoring indexes: %s",
+                ",".join(redact_auth_from_url(url) for url in index_urls),
+            )
+            index_urls = []
+
+        # Make sure find_links is a list before passing to create().
+        find_links = options.find_links or []
+
+        search_scope = SearchScope.create(
+            find_links=find_links,
+            index_urls=index_urls,
+            no_index=options.no_index,
+        )
+        link_collector = LinkCollector(
+            session=session,
+            search_scope=search_scope,
+        )
+        return link_collector
+
+    @property
+    def find_links(self) -> List[str]:
+        return self.search_scope.find_links
+
+    def fetch_response(self, location: Link) -> Optional[IndexContent]:
+        """
+        Fetch an index page (HTML or JSON) containing package links.
+        """
+        return _get_index_content(location, session=self.session)
+
+    def collect_sources(
+        self,
+        project_name: str,
+        candidates_from_page: CandidatesFromPage,
+    ) -> CollectedSources:
+        # The OrderedDict construction deduplicates sources by URL.
+        index_url_sources = collections.OrderedDict(
+            build_source(
+                loc,
+                candidates_from_page=candidates_from_page,
+                page_validator=self.session.is_secure_origin,
+                expand_dir=False,
+                cache_link_parsing=False,
+            )
+            for loc in self.search_scope.get_index_urls_locations(project_name)
+        ).values()
+        find_links_sources = collections.OrderedDict(
+            build_source(
+                loc,
+                candidates_from_page=candidates_from_page,
+                page_validator=self.session.is_secure_origin,
+                expand_dir=True,
+                cache_link_parsing=True,
+            )
+            for loc in self.find_links
+        ).values()
+
+        if logger.isEnabledFor(logging.DEBUG):
+            lines = [
+                f"* {s.link}"
+                for s in itertools.chain(find_links_sources, index_url_sources)
+                if s is not None and s.link is not None
+            ]
+            lines = [
+                f"{len(lines)} location(s) to search "
+                f"for versions of {project_name}:"
+            ] + lines
+            logger.debug("\n".join(lines))
+
+        return CollectedSources(
+            find_links=list(find_links_sources),
+            index_urls=list(index_url_sources),
+        )
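+
+
+# Illustrative sketch (not part of pip): the OrderedDict construction in
+# collect_sources() deduplicates (key, value) pairs by key while keeping
+# first-insertion order; later values simply overwrite earlier ones in place.
+def _demo_ordered_dict_dedup() -> None:
+    pairs = [("https://a", 1), ("https://b", 2), ("https://a", 3)]
+    assert list(collections.OrderedDict(pairs).values()) == [3, 2]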
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/index/package_finder.py b/venv/lib/python3.9/site-packages/pip/_internal/index/package_finder.py
new file mode 100644
index 0000000..9bf247f
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/index/package_finder.py
@@ -0,0 +1,1025 @@
+"""Routines related to PyPI, indexes"""
+
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+import enum
+import functools
+import itertools
+import logging
+import re
+from typing import FrozenSet, Iterable, List, Optional, Set, Tuple, Union
+
+from pip._vendor.packaging import specifiers
+from pip._vendor.packaging.tags import Tag
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.packaging.version import _BaseVersion
+from pip._vendor.packaging.version import parse as parse_version
+
+from pip._internal.exceptions import (
+    BestVersionAlreadyInstalled,
+    DistributionNotFound,
+    InvalidWheelFilename,
+    UnsupportedWheel,
+)
+from pip._internal.index.collector import LinkCollector, parse_links
+from pip._internal.models.candidate import InstallationCandidate
+from pip._internal.models.format_control import FormatControl
+from pip._internal.models.link import Link
+from pip._internal.models.search_scope import SearchScope
+from pip._internal.models.selection_prefs import SelectionPreferences
+from pip._internal.models.target_python import TargetPython
+from pip._internal.models.wheel import Wheel
+from pip._internal.req import InstallRequirement
+from pip._internal.utils._log import getLogger
+from pip._internal.utils.filetypes import WHEEL_EXTENSION
+from pip._internal.utils.hashes import Hashes
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import build_netloc
+from pip._internal.utils.packaging import check_requires_python
+from pip._internal.utils.unpacking import SUPPORTED_EXTENSIONS
+
+__all__ = ["FormatControl", "BestCandidateResult", "PackageFinder"]
+
+
+logger = getLogger(__name__)
+
+BuildTag = Union[Tuple[()], Tuple[int, str]]
+CandidateSortingKey = Tuple[int, int, int, _BaseVersion, Optional[int], BuildTag]
+
+
+def _check_link_requires_python(
+    link: Link,
+    version_info: Tuple[int, int, int],
+    ignore_requires_python: bool = False,
+) -> bool:
+    """
+    Return whether the given Python version is compatible with a link's
+    "Requires-Python" value.
+
+    :param version_info: A 3-tuple of ints representing the Python
+        major-minor-micro version to check.
+    :param ignore_requires_python: Whether to ignore the "Requires-Python"
+        value if the given Python version isn't compatible.
+    """
+    try:
+        is_compatible = check_requires_python(
+            link.requires_python,
+            version_info=version_info,
+        )
+    except specifiers.InvalidSpecifier:
+        logger.debug(
+            "Ignoring invalid Requires-Python (%r) for link: %s",
+            link.requires_python,
+            link,
+        )
+    else:
+        if not is_compatible:
+            version = ".".join(map(str, version_info))
+            if not ignore_requires_python:
+                logger.verbose(
+                    "Link requires a different Python (%s not in: %r): %s",
+                    version,
+                    link.requires_python,
+                    link,
+                )
+                return False
+
+            logger.debug(
+                "Ignoring failed Requires-Python check (%s not in: %r) for link: %s",
+                version,
+                link.requires_python,
+                link,
+            )
+
+    return True
+
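+
+# Illustrative sketch (not part of pip): the compatibility test above reduces
+# to a SpecifierSet membership check against the vendored packaging library
+# that this module already imports.
+def _demo_requires_python_check() -> None:
+    spec = specifiers.SpecifierSet(">=3.7")
+    assert "3.8.0" in spec
+    assert "3.6.0" not in spec
+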
+
+class LinkType(enum.Enum):
+    candidate = enum.auto()
+    different_project = enum.auto()
+    yanked = enum.auto()
+    format_unsupported = enum.auto()
+    format_invalid = enum.auto()
+    platform_mismatch = enum.auto()
+    requires_python_mismatch = enum.auto()
+
+
+class LinkEvaluator:
+
+    """
+    Responsible for evaluating links for a particular project.
+    """
+
+    _py_version_re = re.compile(r"-py([123]\.?[0-9]?)$")
+
+    # Don't include an allow_yanked default value to make sure each call
+    # site considers whether yanked releases are allowed. This also causes
+    # that decision to be made explicit in the calling code, which helps
+    # people when reading the code.
+    def __init__(
+        self,
+        project_name: str,
+        canonical_name: str,
+        formats: FrozenSet[str],
+        target_python: TargetPython,
+        allow_yanked: bool,
+        ignore_requires_python: Optional[bool] = None,
+    ) -> None:
+        """
+        :param project_name: The user supplied package name.
+        :param canonical_name: The canonical package name.
+        :param formats: The formats allowed for this package. Should be a set
+            with 'binary' or 'source' or both in it.
+        :param target_python: The target Python interpreter to use when
+            evaluating link compatibility. This is used, for example, to
+            check wheel compatibility, as well as when checking the Python
+            version, e.g. the Python version embedded in a link filename
+            (or egg fragment) and against an HTML link's optional PEP 503
+            "data-requires-python" attribute.
+        :param allow_yanked: Whether files marked as yanked (in the sense
+            of PEP 592) are permitted to be candidates for install.
+        :param ignore_requires_python: Whether to ignore incompatible
+            PEP 503 "data-requires-python" values in HTML links. Defaults
+            to False.
+        """
+        if ignore_requires_python is None:
+            ignore_requires_python = False
+
+        self._allow_yanked = allow_yanked
+        self._canonical_name = canonical_name
+        self._ignore_requires_python = ignore_requires_python
+        self._formats = formats
+        self._target_python = target_python
+
+        self.project_name = project_name
+
+    def evaluate_link(self, link: Link) -> Tuple[LinkType, str]:
+        """
+        Determine whether a link is a candidate for installation.
+
+        :return: A tuple (result, detail), where *result* is an enum
+            representing whether the evaluation found a candidate, or the reason
+            why one is not found. If a candidate is found, *detail* will be the
+            candidate's version string; if one is not found, it contains the
+            reason the link fails to qualify.
+        """
+        version = None
+        if link.is_yanked and not self._allow_yanked:
+            reason = link.yanked_reason or "<none given>"
+            return (LinkType.yanked, f"yanked for reason: {reason}")
+
+        if link.egg_fragment:
+            egg_info = link.egg_fragment
+            ext = link.ext
+        else:
+            egg_info, ext = link.splitext()
+            if not ext:
+                return (LinkType.format_unsupported, "not a file")
+            if ext not in SUPPORTED_EXTENSIONS:
+                return (
+                    LinkType.format_unsupported,
+                    f"unsupported archive format: {ext}",
+                )
+            if "binary" not in self._formats and ext == WHEEL_EXTENSION:
+                reason = f"No binaries permitted for {self.project_name}"
+                return (LinkType.format_unsupported, reason)
+            if "macosx10" in link.path and ext == ".zip":
+                return (LinkType.format_unsupported, "macosx10 one")
+            if ext == WHEEL_EXTENSION:
+                try:
+                    wheel = Wheel(link.filename)
+                except InvalidWheelFilename:
+                    return (
+                        LinkType.format_invalid,
+                        "invalid wheel filename",
+                    )
+                if canonicalize_name(wheel.name) != self._canonical_name:
+                    reason = f"wrong project name (not {self.project_name})"
+                    return (LinkType.different_project, reason)
+
+                supported_tags = self._target_python.get_tags()
+                if not wheel.supported(supported_tags):
+                    # Include the wheel's tags in the reason string to
+                    # simplify troubleshooting compatibility issues.
+                    file_tags = ", ".join(wheel.get_formatted_file_tags())
+                    reason = (
+                        f"none of the wheel's tags ({file_tags}) are compatible "
+                        f"(run pip debug --verbose to show compatible tags)"
+                    )
+                    return (LinkType.platform_mismatch, reason)
+
+                version = wheel.version
+
+        # This should be up by the self.ok_binary check, but see issue 2700.
+        if "source" not in self._formats and ext != WHEEL_EXTENSION:
+            reason = f"No sources permitted for {self.project_name}"
+            return (LinkType.format_unsupported, reason)
+
+        if not version:
+            version = _extract_version_from_fragment(
+                egg_info,
+                self._canonical_name,
+            )
+        if not version:
+            reason = f"Missing project version for {self.project_name}"
+            return (LinkType.format_invalid, reason)
+
+        match = self._py_version_re.search(version)
+        if match:
+            version = version[: match.start()]
+            py_version = match.group(1)
+            if py_version != self._target_python.py_version:
+                return (
+                    LinkType.platform_mismatch,
+                    "Python version is incorrect",
+                )
+
+        supports_python = _check_link_requires_python(
+            link,
+            version_info=self._target_python.py_version_info,
+            ignore_requires_python=self._ignore_requires_python,
+        )
+        if not supports_python:
+            reason = f"{version} Requires-Python {link.requires_python}"
+            return (LinkType.requires_python_mismatch, reason)
+
+        logger.debug("Found link %s, version: %s", link, version)
+
+        return (LinkType.candidate, version)
+
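+
+# Hypothetical sketch (not part of pip): evaluating a universal wheel link.
+# The URL is made up for illustration, and the assertion assumes the running
+# interpreter supports py3-none-any wheels (any CPython 3 does).
+def _demo_evaluate_link() -> None:
+    evaluator = LinkEvaluator(
+        project_name="pip",
+        canonical_name="pip",
+        formats=frozenset({"binary", "source"}),
+        target_python=TargetPython(),
+        allow_yanked=False,
+    )
+    result, detail = evaluator.evaluate_link(
+        Link("https://example.com/pip-22.3-py3-none-any.whl")
+    )
+    assert result == LinkType.candidate and detail == "22.3"
+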
+
+def filter_unallowed_hashes(
+    candidates: List[InstallationCandidate],
+    hashes: Hashes,
+    project_name: str,
+) -> List[InstallationCandidate]:
+    """
+    Filter out candidates whose hashes aren't allowed, and return a new
+    list of candidates.
+
+    If at least one candidate has an allowed hash, then all candidates with
+    either an allowed hash or no hash specified are returned.  Otherwise,
+    the given candidates are returned.
+
+    Including the candidates with no hash specified when there is a match
+    allows a warning to be logged if there is a more preferred candidate
+    with no hash specified.  Returning all candidates in the case of no
+    matches lets pip report the hash of the candidate that would otherwise
+    have been installed (e.g. permitting the user to more easily update
+    their requirements file with the desired hash).
+    """
+    if not hashes:
+        logger.debug(
+            "Given no hashes to check %s links for project %r: "
+            "discarding no candidates",
+            len(candidates),
+            project_name,
+        )
+        # Make sure we're not returning the given list object itself.
+        return list(candidates)
+
+    matches_or_no_digest = []
+    # Collect the non-matches for logging purposes.
+    non_matches = []
+    match_count = 0
+    for candidate in candidates:
+        link = candidate.link
+        if not link.has_hash:
+            pass
+        elif link.is_hash_allowed(hashes=hashes):
+            match_count += 1
+        else:
+            non_matches.append(candidate)
+            continue
+
+        matches_or_no_digest.append(candidate)
+
+    if match_count:
+        filtered = matches_or_no_digest
+    else:
+        # Make sure we're not returning the given list object itself.
+        filtered = list(candidates)
+
+    if len(filtered) == len(candidates):
+        discard_message = "discarding no candidates"
+    else:
+        discard_message = "discarding {} non-matches:\n  {}".format(
+            len(non_matches),
+            "\n  ".join(str(candidate.link) for candidate in non_matches),
+        )
+
+    logger.debug(
+        "Checked %s links for project %r against %s hashes "
+        "(%s matches, %s no digest): %s",
+        len(candidates),
+        project_name,
+        hashes.digest_count,
+        match_count,
+        len(matches_or_no_digest) - match_count,
+        discard_message,
+    )
+
+    return filtered
+
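+
+# Hypothetical sketch (not part of pip): the "ride along" rule above. One
+# candidate carries a matching sha256 fragment and the other has no hash at
+# all; because at least one candidate matches, both are kept. The digest and
+# URLs are placeholders invented for illustration.
+def _demo_filter_unallowed_hashes() -> None:
+    digest = "a" * 64
+    hashed = InstallationCandidate(
+        "pkg", "1.0", Link(f"https://example.com/pkg-1.0.tar.gz#sha256={digest}")
+    )
+    unhashed = InstallationCandidate(
+        "pkg", "1.1", Link("https://example.com/pkg-1.1.tar.gz")
+    )
+    kept = filter_unallowed_hashes(
+        [hashed, unhashed], hashes=Hashes({"sha256": [digest]}), project_name="pkg"
+    )
+    assert kept == [hashed, unhashed]
+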
+
+class CandidatePreferences:
+
+    """
+    Encapsulates some of the preferences for filtering and sorting
+    InstallationCandidate objects.
+    """
+
+    def __init__(
+        self,
+        prefer_binary: bool = False,
+        allow_all_prereleases: bool = False,
+    ) -> None:
+        """
+        :param allow_all_prereleases: Whether to allow all pre-releases.
+        """
+        self.allow_all_prereleases = allow_all_prereleases
+        self.prefer_binary = prefer_binary
+
+
+class BestCandidateResult:
+    """A collection of candidates, returned by `PackageFinder.find_best_candidate`.
+
+    This class is only intended to be instantiated by CandidateEvaluator's
+    `compute_best_candidate()` method.
+    """
+
+    def __init__(
+        self,
+        candidates: List[InstallationCandidate],
+        applicable_candidates: List[InstallationCandidate],
+        best_candidate: Optional[InstallationCandidate],
+    ) -> None:
+        """
+        :param candidates: A sequence of all available candidates found.
+        :param applicable_candidates: The applicable candidates.
+        :param best_candidate: The most preferred candidate found, or None
+            if no applicable candidates were found.
+        """
+        assert set(applicable_candidates) <= set(candidates)
+
+        if best_candidate is None:
+            assert not applicable_candidates
+        else:
+            assert best_candidate in applicable_candidates
+
+        self._applicable_candidates = applicable_candidates
+        self._candidates = candidates
+
+        self.best_candidate = best_candidate
+
+    def iter_all(self) -> Iterable[InstallationCandidate]:
+        """Iterate through all candidates."""
+        return iter(self._candidates)
+
+    def iter_applicable(self) -> Iterable[InstallationCandidate]:
+        """Iterate through the applicable candidates."""
+        return iter(self._applicable_candidates)
+
+
+class CandidateEvaluator:
+
+    """
+    Responsible for filtering and sorting candidates for installation based
+    on what tags are valid.
+    """
+
+    @classmethod
+    def create(
+        cls,
+        project_name: str,
+        target_python: Optional[TargetPython] = None,
+        prefer_binary: bool = False,
+        allow_all_prereleases: bool = False,
+        specifier: Optional[specifiers.BaseSpecifier] = None,
+        hashes: Optional[Hashes] = None,
+    ) -> "CandidateEvaluator":
+        """Create a CandidateEvaluator object.
+
+        :param target_python: The target Python interpreter to use when
+            checking compatibility. If None (the default), a TargetPython
+            object will be constructed from the running Python.
+        :param specifier: An optional object implementing `filter`
+            (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable
+            versions.
+        :param hashes: An optional collection of allowed hashes.
+        """
+        if target_python is None:
+            target_python = TargetPython()
+        if specifier is None:
+            specifier = specifiers.SpecifierSet()
+
+        supported_tags = target_python.get_tags()
+
+        return cls(
+            project_name=project_name,
+            supported_tags=supported_tags,
+            specifier=specifier,
+            prefer_binary=prefer_binary,
+            allow_all_prereleases=allow_all_prereleases,
+            hashes=hashes,
+        )
+
+    def __init__(
+        self,
+        project_name: str,
+        supported_tags: List[Tag],
+        specifier: specifiers.BaseSpecifier,
+        prefer_binary: bool = False,
+        allow_all_prereleases: bool = False,
+        hashes: Optional[Hashes] = None,
+    ) -> None:
+        """
+        :param supported_tags: The PEP 425 tags supported by the target
+            Python in order of preference (most preferred first).
+        """
+        self._allow_all_prereleases = allow_all_prereleases
+        self._hashes = hashes
+        self._prefer_binary = prefer_binary
+        self._project_name = project_name
+        self._specifier = specifier
+        self._supported_tags = supported_tags
+        # Since the index of the tag in the _supported_tags list is used
+        # as a priority, precompute a map from tag to index/priority to be
+        # used in wheel.find_most_preferred_tag.
+        self._wheel_tag_preferences = {
+            tag: idx for idx, tag in enumerate(supported_tags)
+        }
+
+    def get_applicable_candidates(
+        self,
+        candidates: List[InstallationCandidate],
+    ) -> List[InstallationCandidate]:
+        """
+        Return the applicable candidates from a list of candidates.
+        """
+        # Using None infers from the specifier instead.
+        allow_prereleases = self._allow_all_prereleases or None
+        specifier = self._specifier
+        versions = {
+            str(v)
+            for v in specifier.filter(
+                # We turn the version object into a str here because otherwise
+                # when we're debundled but setuptools isn't, Python will see
+                # packaging.version.Version and
+                # pkg_resources._vendor.packaging.version.Version as different
+                # types. This way we'll use a str as a common data interchange
+                # format. If we stop using the pkg_resources provided specifier
+                # and start using our own, we can drop the cast to str().
+                (str(c.version) for c in candidates),
+                prereleases=allow_prereleases,
+            )
+        }
+
+        # Again, converting version to str to deal with debundling.
+        applicable_candidates = [c for c in candidates if str(c.version) in versions]
+
+        filtered_applicable_candidates = filter_unallowed_hashes(
+            candidates=applicable_candidates,
+            hashes=self._hashes,
+            project_name=self._project_name,
+        )
+
+        return sorted(filtered_applicable_candidates, key=self._sort_key)
+
+    def _sort_key(self, candidate: InstallationCandidate) -> CandidateSortingKey:
+        """
+        Function to pass as the `key` argument to a call to sorted() to sort
+        InstallationCandidates by preference.
+
+        Returns a tuple such that tuples sorting as greater using Python's
+        default comparison operator are more preferred.
+
+        The preference is as follows:
+
+        First and foremost, candidates with allowed (matching) hashes are
+        always preferred over candidates without matching hashes. This is
+        because e.g. if the only candidate with an allowed hash is yanked,
+        we still want to use that candidate.
+
+        Second, excepting hash considerations, candidates that have been
+        yanked (in the sense of PEP 592) are always less preferred than
+        candidates that haven't been yanked. Then:
+
+        If not finding wheels, they are sorted by version only.
+        If finding wheels, then the sort order is by version, then:
+          1. existing installs
+          2. wheels ordered via Wheel.support_index_min(self._supported_tags)
+          3. source archives
+        If prefer_binary was set, then all wheels are sorted above sources.
+
+        Note: it was considered to embed this logic into the Link
+              comparison operators, but then different sdist links
+              with the same version would have to be considered equal.
+        """
+        valid_tags = self._supported_tags
+        support_num = len(valid_tags)
+        build_tag: BuildTag = ()
+        binary_preference = 0
+        link = candidate.link
+        if link.is_wheel:
+            # can raise InvalidWheelFilename
+            wheel = Wheel(link.filename)
+            try:
+                pri = -(
+                    wheel.find_most_preferred_tag(
+                        valid_tags, self._wheel_tag_preferences
+                    )
+                )
+            except ValueError:
+                raise UnsupportedWheel(
+                    "{} is not a supported wheel for this platform. It "
+                    "can't be sorted.".format(wheel.filename)
+                )
+            if self._prefer_binary:
+                binary_preference = 1
+            if wheel.build_tag is not None:
+                match = re.match(r"^(\d+)(.*)$", wheel.build_tag)
+                build_tag_groups = match.groups()
+                build_tag = (int(build_tag_groups[0]), build_tag_groups[1])
+        else:  # sdist
+            pri = -(support_num)
+        has_allowed_hash = int(link.is_hash_allowed(self._hashes))
+        yank_value = -1 * int(link.is_yanked)  # -1 for yanked.
+        return (
+            has_allowed_hash,
+            yank_value,
+            binary_preference,
+            candidate.version,
+            pri,
+            build_tag,
+        )
+
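+    # Illustrative sketch (not part of pip): why the key tuple above works
+    # with plain max()/sorted(). A non-yanked 1.0 with an allowed hash
+    # outranks a yanked 2.0, because the yank field is compared before the
+    # version field.
+    def _demo_sort_key_ordering(self) -> None:
+        fresh = (1, 0, 0, parse_version("1.0"), -10, ())
+        yanked = (1, -1, 0, parse_version("2.0"), -10, ())
+        assert max([fresh, yanked]) == fresh
+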
+    def sort_best_candidate(
+        self,
+        candidates: List[InstallationCandidate],
+    ) -> Optional[InstallationCandidate]:
+        """
+        Return the best candidate per the instance's sort order, or None if
+        no candidate is acceptable.
+        """
+        if not candidates:
+            return None
+        best_candidate = max(candidates, key=self._sort_key)
+        return best_candidate
+
+    def compute_best_candidate(
+        self,
+        candidates: List[InstallationCandidate],
+    ) -> BestCandidateResult:
+        """
+        Compute and return a `BestCandidateResult` instance.
+        """
+        applicable_candidates = self.get_applicable_candidates(candidates)
+
+        best_candidate = self.sort_best_candidate(applicable_candidates)
+
+        return BestCandidateResult(
+            candidates,
+            applicable_candidates=applicable_candidates,
+            best_candidate=best_candidate,
+        )
+
+
+class PackageFinder:
+    """This finds packages.
+
+    This is meant to match easy_install's technique for looking for
+    packages, by reading pages and looking for appropriate links.
+    """
+
+    def __init__(
+        self,
+        link_collector: LinkCollector,
+        target_python: TargetPython,
+        allow_yanked: bool,
+        format_control: Optional[FormatControl] = None,
+        candidate_prefs: Optional[CandidatePreferences] = None,
+        ignore_requires_python: Optional[bool] = None,
+    ) -> None:
+        """
+        This constructor is primarily meant to be used by the create() class
+        method and from tests.
+
+        :param format_control: A FormatControl object, used to control
+            the selection of source packages / binary packages when consulting
+            the index and links.
+        :param candidate_prefs: Options to use when creating a
+            CandidateEvaluator object.
+        """
+        if candidate_prefs is None:
+            candidate_prefs = CandidatePreferences()
+
+        format_control = format_control or FormatControl(set(), set())
+
+        self._allow_yanked = allow_yanked
+        self._candidate_prefs = candidate_prefs
+        self._ignore_requires_python = ignore_requires_python
+        self._link_collector = link_collector
+        self._target_python = target_python
+
+        self.format_control = format_control
+
+        # These are boring links that have already been logged somehow.
+        self._logged_links: Set[Tuple[Link, LinkType, str]] = set()
+
+    # Don't include an allow_yanked default value to make sure each call
+    # site considers whether yanked releases are allowed. This also causes
+    # that decision to be made explicit in the calling code, which helps
+    # people when reading the code.
+    @classmethod
+    def create(
+        cls,
+        link_collector: LinkCollector,
+        selection_prefs: SelectionPreferences,
+        target_python: Optional[TargetPython] = None,
+    ) -> "PackageFinder":
+        """Create a PackageFinder.
+
+        :param selection_prefs: The candidate selection preferences, as a
+            SelectionPreferences object.
+        :param target_python: The target Python interpreter to use when
+            checking compatibility. If None (the default), a TargetPython
+            object will be constructed from the running Python.
+        """
+        if target_python is None:
+            target_python = TargetPython()
+
+        candidate_prefs = CandidatePreferences(
+            prefer_binary=selection_prefs.prefer_binary,
+            allow_all_prereleases=selection_prefs.allow_all_prereleases,
+        )
+
+        return cls(
+            candidate_prefs=candidate_prefs,
+            link_collector=link_collector,
+            target_python=target_python,
+            allow_yanked=selection_prefs.allow_yanked,
+            format_control=selection_prefs.format_control,
+            ignore_requires_python=selection_prefs.ignore_requires_python,
+        )
+
+    @property
+    def target_python(self) -> TargetPython:
+        return self._target_python
+
+    @property
+    def search_scope(self) -> SearchScope:
+        return self._link_collector.search_scope
+
+    @search_scope.setter
+    def search_scope(self, search_scope: SearchScope) -> None:
+        self._link_collector.search_scope = search_scope
+
+    @property
+    def find_links(self) -> List[str]:
+        return self._link_collector.find_links
+
+    @property
+    def index_urls(self) -> List[str]:
+        return self.search_scope.index_urls
+
+    @property
+    def trusted_hosts(self) -> Iterable[str]:
+        for host_port in self._link_collector.session.pip_trusted_origins:
+            yield build_netloc(*host_port)
+
+    @property
+    def allow_all_prereleases(self) -> bool:
+        return self._candidate_prefs.allow_all_prereleases
+
+    def set_allow_all_prereleases(self) -> None:
+        self._candidate_prefs.allow_all_prereleases = True
+
+    @property
+    def prefer_binary(self) -> bool:
+        return self._candidate_prefs.prefer_binary
+
+    def set_prefer_binary(self) -> None:
+        self._candidate_prefs.prefer_binary = True
+
+    def requires_python_skipped_reasons(self) -> List[str]:
+        reasons = {
+            detail
+            for _, result, detail in self._logged_links
+            if result == LinkType.requires_python_mismatch
+        }
+        return sorted(reasons)
+
+    def make_link_evaluator(self, project_name: str) -> LinkEvaluator:
+        canonical_name = canonicalize_name(project_name)
+        formats = self.format_control.get_allowed_formats(canonical_name)
+
+        return LinkEvaluator(
+            project_name=project_name,
+            canonical_name=canonical_name,
+            formats=formats,
+            target_python=self._target_python,
+            allow_yanked=self._allow_yanked,
+            ignore_requires_python=self._ignore_requires_python,
+        )
+
+    def _sort_links(self, links: Iterable[Link]) -> List[Link]:
+        """
+        Returns the given links in order, non-egg links first and egg links
+        second, while eliminating duplicates.
+        """
+        eggs, no_eggs = [], []
+        seen: Set[Link] = set()
+        for link in links:
+            if link not in seen:
+                seen.add(link)
+                if link.egg_fragment:
+                    eggs.append(link)
+                else:
+                    no_eggs.append(link)
+        return no_eggs + eggs
+
+    def _log_skipped_link(self, link: Link, result: LinkType, detail: str) -> None:
+        entry = (link, result, detail)
+        if entry not in self._logged_links:
+            # Put the link at the end so the reason is more visible and because
+            # the link string is usually very long.
+            logger.debug("Skipping link: %s: %s", detail, link)
+            self._logged_links.add(entry)
+
+    def get_install_candidate(
+        self, link_evaluator: LinkEvaluator, link: Link
+    ) -> Optional[InstallationCandidate]:
+        """
+        If the link is a candidate for install, convert it to an
+        InstallationCandidate and return it. Otherwise, return None.
+        """
+        result, detail = link_evaluator.evaluate_link(link)
+        if result != LinkType.candidate:
+            self._log_skipped_link(link, result, detail)
+            return None
+
+        return InstallationCandidate(
+            name=link_evaluator.project_name,
+            link=link,
+            version=detail,
+        )
+
+    def evaluate_links(
+        self, link_evaluator: LinkEvaluator, links: Iterable[Link]
+    ) -> List[InstallationCandidate]:
+        """
+        Convert links that are candidates to InstallationCandidate objects.
+        """
+        candidates = []
+        for link in self._sort_links(links):
+            candidate = self.get_install_candidate(link_evaluator, link)
+            if candidate is not None:
+                candidates.append(candidate)
+
+        return candidates
+
+    def process_project_url(
+        self, project_url: Link, link_evaluator: LinkEvaluator
+    ) -> List[InstallationCandidate]:
+        logger.debug(
+            "Fetching project page and analyzing links: %s",
+            project_url,
+        )
+        index_response = self._link_collector.fetch_response(project_url)
+        if index_response is None:
+            return []
+
+        page_links = list(parse_links(index_response))
+
+        with indent_log():
+            package_links = self.evaluate_links(
+                link_evaluator,
+                links=page_links,
+            )
+
+        return package_links
+
+    @functools.lru_cache(maxsize=None)
+    def find_all_candidates(self, project_name: str) -> List[InstallationCandidate]:
+        """Find all available InstallationCandidate for project_name
+
+        This checks index_urls and find_links.
+        All versions found are returned as an InstallationCandidate list.
+
+        See LinkEvaluator.evaluate_link() for details on which files
+        are accepted.
+        """
+        link_evaluator = self.make_link_evaluator(project_name)
+
+        collected_sources = self._link_collector.collect_sources(
+            project_name=project_name,
+            candidates_from_page=functools.partial(
+                self.process_project_url,
+                link_evaluator=link_evaluator,
+            ),
+        )
+
+        page_candidates_it = itertools.chain.from_iterable(
+            source.page_candidates()
+            for sources in collected_sources
+            for source in sources
+            if source is not None
+        )
+        page_candidates = list(page_candidates_it)
+
+        file_links_it = itertools.chain.from_iterable(
+            source.file_links()
+            for sources in collected_sources
+            for source in sources
+            if source is not None
+        )
+        file_candidates = self.evaluate_links(
+            link_evaluator,
+            sorted(file_links_it, reverse=True),
+        )
+
+        if logger.isEnabledFor(logging.DEBUG) and file_candidates:
+            paths = []
+            for candidate in file_candidates:
+                assert candidate.link.url  # we need to have a URL
+                try:
+                    paths.append(candidate.link.file_path)
+                except Exception:
+                    paths.append(candidate.link.url)  # it's not a local file
+
+            logger.debug("Local files found: %s", ", ".join(paths))
+
+        # This is an intentional priority ordering
+        return file_candidates + page_candidates
+
+    def make_candidate_evaluator(
+        self,
+        project_name: str,
+        specifier: Optional[specifiers.BaseSpecifier] = None,
+        hashes: Optional[Hashes] = None,
+    ) -> CandidateEvaluator:
+        """Create a CandidateEvaluator object to use."""
+        candidate_prefs = self._candidate_prefs
+        return CandidateEvaluator.create(
+            project_name=project_name,
+            target_python=self._target_python,
+            prefer_binary=candidate_prefs.prefer_binary,
+            allow_all_prereleases=candidate_prefs.allow_all_prereleases,
+            specifier=specifier,
+            hashes=hashes,
+        )
+
+    @functools.lru_cache(maxsize=None)
+    def find_best_candidate(
+        self,
+        project_name: str,
+        specifier: Optional[specifiers.BaseSpecifier] = None,
+        hashes: Optional[Hashes] = None,
+    ) -> BestCandidateResult:
+        """Find matches for the given project and specifier.
+
+        :param specifier: An optional object implementing `filter`
+            (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable
+            versions.
+
+        :return: A `BestCandidateResult` instance.
+        """
+        candidates = self.find_all_candidates(project_name)
+        candidate_evaluator = self.make_candidate_evaluator(
+            project_name=project_name,
+            specifier=specifier,
+            hashes=hashes,
+        )
+        return candidate_evaluator.compute_best_candidate(candidates)
+
+    def find_requirement(
+        self, req: InstallRequirement, upgrade: bool
+    ) -> Optional[InstallationCandidate]:
+        """Try to find a Link matching req
+
+        Expects req, an InstallRequirement and upgrade, a boolean
+        Returns a InstallationCandidate if found,
+        Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise
+        """
+        hashes = req.hashes(trust_internet=False)
+        best_candidate_result = self.find_best_candidate(
+            req.name,
+            specifier=req.specifier,
+            hashes=hashes,
+        )
+        best_candidate = best_candidate_result.best_candidate
+
+        installed_version: Optional[_BaseVersion] = None
+        if req.satisfied_by is not None:
+            installed_version = req.satisfied_by.version
+
+        def _format_versions(cand_iter: Iterable[InstallationCandidate]) -> str:
+            # This repeated parse_version and str() conversion is needed to
+            # handle different vendoring sources from pip and pkg_resources.
+            # If we stop using the pkg_resources provided specifier and start
+            # using our own, we can drop the cast to str().
+            return (
+                ", ".join(
+                    sorted(
+                        {str(c.version) for c in cand_iter},
+                        key=parse_version,
+                    )
+                )
+                or "none"
+            )
+
+        if installed_version is None and best_candidate is None:
+            logger.critical(
+                "Could not find a version that satisfies the requirement %s "
+                "(from versions: %s)",
+                req,
+                _format_versions(best_candidate_result.iter_all()),
+            )
+
+            raise DistributionNotFound(
+                "No matching distribution found for {}".format(req)
+            )
+
+        best_installed = False
+        if installed_version and (
+            best_candidate is None or best_candidate.version <= installed_version
+        ):
+            best_installed = True
+
+        if not upgrade and installed_version is not None:
+            if best_installed:
+                logger.debug(
+                    "Existing installed version (%s) is most up-to-date and "
+                    "satisfies requirement",
+                    installed_version,
+                )
+            else:
+                logger.debug(
+                    "Existing installed version (%s) satisfies requirement "
+                    "(most up-to-date version is %s)",
+                    installed_version,
+                    best_candidate.version,
+                )
+            return None
+
+        if best_installed:
+            # We have an existing version, and it's the best version.
+            logger.debug(
+                "Installed version (%s) is most up-to-date (past versions: %s)",
+                installed_version,
+                _format_versions(best_candidate_result.iter_applicable()),
+            )
+            raise BestVersionAlreadyInstalled
+
+        logger.debug(
+            "Using version %s (newest of versions: %s)",
+            best_candidate.version,
+            _format_versions(best_candidate_result.iter_applicable()),
+        )
+        return best_candidate
+
+
+def _find_name_version_sep(fragment: str, canonical_name: str) -> int:
+    """Find the separator's index based on the package's canonical name.
+
+    :param fragment: A <package>+<version> filename "fragment" (stem) or
+        egg fragment.
+    :param canonical_name: The package's canonical name.
+
+    This function is needed since the canonicalized name does not necessarily
+    have the same length as the egg info's name part. An example::
+
+    >>> fragment = 'foo__bar-1.0'
+    >>> canonical_name = 'foo-bar'
+    >>> _find_name_version_sep(fragment, canonical_name)
+    8
+    """
+    # Project name and version must be separated by a single dash. Find all
+    # occurrences of dashes; if the string in front of a dash matches the
+    # canonical name, that dash separates the name and version parts.
+    for i, c in enumerate(fragment):
+        if c != "-":
+            continue
+        if canonicalize_name(fragment[:i]) == canonical_name:
+            return i
+    raise ValueError(f"{fragment} does not match {canonical_name}")
+
+
+def _extract_version_from_fragment(fragment: str, canonical_name: str) -> Optional[str]:
+    """Parse the version string from a <package>+<version> filename
+    "fragment" (stem) or egg fragment.
+
+    :param fragment: The string to parse. E.g. foo-2.1
+    :param canonical_name: The canonicalized name of the package this
+        belongs to.
+    """
+    try:
+        version_start = _find_name_version_sep(fragment, canonical_name) + 1
+    except ValueError:
+        return None
+    version = fragment[version_start:]
+    if not version:
+        return None
+    return version
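
For reference, the fragment-splitting logic above is easy to exercise in isolation. The sketch below re-implements the dash-scanning approach with a stand-in for canonicalize_name (the real helper lives in pip._vendor.packaging.utils); split_fragment and canonicalize are illustrative names, not pip API.

    # Standalone sketch of the separator scan in _find_name_version_sep();
    # re.sub stands in for packaging's canonicalize_name, which lowercases
    # and collapses runs of "-", "_" and "." into a single dash.
    import re

    def canonicalize(name: str) -> str:
        return re.sub(r"[-_.]+", "-", name).lower()

    def split_fragment(fragment: str, canonical_name: str) -> str:
        # Scan every dash; the dash whose prefix canonicalizes to the
        # project name separates the name and version parts.
        for i, c in enumerate(fragment):
            if c == "-" and canonicalize(fragment[:i]) == canonical_name:
                return fragment[i + 1 :]
        raise ValueError(f"{fragment} does not match {canonical_name}")

    assert split_fragment("foo__bar-1.0", "foo-bar") == "1.0"
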
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/index/sources.py b/venv/lib/python3.9/site-packages/pip/_internal/index/sources.py
new file mode 100644
index 0000000..eec3f12
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/index/sources.py
@@ -0,0 +1,224 @@
+import logging
+import mimetypes
+import os
+import pathlib
+from typing import Callable, Iterable, Optional, Tuple
+
+from pip._internal.models.candidate import InstallationCandidate
+from pip._internal.models.link import Link
+from pip._internal.utils.urls import path_to_url, url_to_path
+from pip._internal.vcs import is_url
+
+logger = logging.getLogger(__name__)
+
+FoundCandidates = Iterable[InstallationCandidate]
+FoundLinks = Iterable[Link]
+CandidatesFromPage = Callable[[Link], Iterable[InstallationCandidate]]
+PageValidator = Callable[[Link], bool]
+
+
+class LinkSource:
+    @property
+    def link(self) -> Optional[Link]:
+        """Returns the underlying link, if there's one."""
+        raise NotImplementedError()
+
+    def page_candidates(self) -> FoundCandidates:
+        """Candidates found by parsing an archive listing HTML file."""
+        raise NotImplementedError()
+
+    def file_links(self) -> FoundLinks:
+        """Links found by specifying archives directly."""
+        raise NotImplementedError()
+
+
+def _is_html_file(file_url: str) -> bool:
+    return mimetypes.guess_type(file_url, strict=False)[0] == "text/html"
+
+
+class _FlatDirectorySource(LinkSource):
+    """Link source specified by ``--find-links=<path-to-dir>``.
+
+    This looks at the content of the directory and returns:
+
+    * ``page_candidates``: Links listed on each HTML file in the directory.
+    * ``file_links``: Archives in the directory.
+    """
+
+    def __init__(
+        self,
+        candidates_from_page: CandidatesFromPage,
+        path: str,
+    ) -> None:
+        self._candidates_from_page = candidates_from_page
+        self._path = pathlib.Path(os.path.realpath(path))
+
+    @property
+    def link(self) -> Optional[Link]:
+        return None
+
+    def page_candidates(self) -> FoundCandidates:
+        for path in self._path.iterdir():
+            url = path_to_url(str(path))
+            if not _is_html_file(url):
+                continue
+            yield from self._candidates_from_page(Link(url))
+
+    def file_links(self) -> FoundLinks:
+        for path in self._path.iterdir():
+            url = path_to_url(str(path))
+            if _is_html_file(url):
+                continue
+            yield Link(url)
+
+
+class _LocalFileSource(LinkSource):
+    """``--find-links=<path-or-url>`` or ``--[extra-]index-url=<path-or-url>``.
+
+    If a URL is supplied, it must be a ``file:`` URL. If a path is supplied to
+    the option, it is converted to a URL first. This returns:
+
+    * ``page_candidates``: Links listed on an HTML file.
+    * ``file_links``: The non-HTML file.
+    """
+
+    def __init__(
+        self,
+        candidates_from_page: CandidatesFromPage,
+        link: Link,
+    ) -> None:
+        self._candidates_from_page = candidates_from_page
+        self._link = link
+
+    @property
+    def link(self) -> Optional[Link]:
+        return self._link
+
+    def page_candidates(self) -> FoundCandidates:
+        if not _is_html_file(self._link.url):
+            return
+        yield from self._candidates_from_page(self._link)
+
+    def file_links(self) -> FoundLinks:
+        if _is_html_file(self._link.url):
+            return
+        yield self._link
+
+
+class _RemoteFileSource(LinkSource):
+    """``--find-links=<url>`` or ``--[extra-]index-url=<url>``.
+
+    This returns:
+
+    * ``page_candidates``: Links listed on an HTML file.
+    * ``file_links``: The non-HTML file.
+    """
+
+    def __init__(
+        self,
+        candidates_from_page: CandidatesFromPage,
+        page_validator: PageValidator,
+        link: Link,
+    ) -> None:
+        self._candidates_from_page = candidates_from_page
+        self._page_validator = page_validator
+        self._link = link
+
+    @property
+    def link(self) -> Optional[Link]:
+        return self._link
+
+    def page_candidates(self) -> FoundCandidates:
+        if not self._page_validator(self._link):
+            return
+        yield from self._candidates_from_page(self._link)
+
+    def file_links(self) -> FoundLinks:
+        yield self._link
+
+
+class _IndexDirectorySource(LinkSource):
+    """``--[extra-]index-url=<path-to-directory>``.
+
+    This is treated like a remote URL; ``candidates_from_page`` contains logic
+    for this by appending ``index.html`` to the link.
+    """
+
+    def __init__(
+        self,
+        candidates_from_page: CandidatesFromPage,
+        link: Link,
+    ) -> None:
+        self._candidates_from_page = candidates_from_page
+        self._link = link
+
+    @property
+    def link(self) -> Optional[Link]:
+        return self._link
+
+    def page_candidates(self) -> FoundCandidates:
+        yield from self._candidates_from_page(self._link)
+
+    def file_links(self) -> FoundLinks:
+        return ()
+
+
+def build_source(
+    location: str,
+    *,
+    candidates_from_page: CandidatesFromPage,
+    page_validator: PageValidator,
+    expand_dir: bool,
+    cache_link_parsing: bool,
+) -> Tuple[Optional[str], Optional[LinkSource]]:
+
+    path: Optional[str] = None
+    url: Optional[str] = None
+    if os.path.exists(location):  # Is a local path.
+        url = path_to_url(location)
+        path = location
+    elif location.startswith("file:"):  # A file: URL.
+        url = location
+        path = url_to_path(location)
+    elif is_url(location):
+        url = location
+
+    if url is None:
+        msg = (
+            "Location '%s' is ignored: "
+            "it is either a non-existing path or lacks a specific scheme."
+        )
+        logger.warning(msg, location)
+        return (None, None)
+
+    if path is None:
+        source: LinkSource = _RemoteFileSource(
+            candidates_from_page=candidates_from_page,
+            page_validator=page_validator,
+            link=Link(url, cache_link_parsing=cache_link_parsing),
+        )
+        return (url, source)
+
+    if os.path.isdir(path):
+        if expand_dir:
+            source = _FlatDirectorySource(
+                candidates_from_page=candidates_from_page,
+                path=path,
+            )
+        else:
+            source = _IndexDirectorySource(
+                candidates_from_page=candidates_from_page,
+                link=Link(url, cache_link_parsing=cache_link_parsing),
+            )
+        return (url, source)
+    elif os.path.isfile(path):
+        source = _LocalFileSource(
+            candidates_from_page=candidates_from_page,
+            link=Link(url, cache_link_parsing=cache_link_parsing),
+        )
+        return (url, source)
+    logger.warning(
+        "Location '%s' is ignored: it is neither a file nor a directory.",
+        location,
+    )
+    return (url, None)
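
To make the dispatch in build_source() above concrete, here is a minimal sketch that classifies a --find-links/--index-url location the same three ways (existing local path, file: URL, other URL); classify() is a hypothetical helper, not part of pip.

    # Minimal sketch of build_source()'s location dispatch; classify()
    # is a hypothetical helper, not pip API.
    import os
    from urllib.parse import urlparse

    def classify(location: str) -> str:
        if os.path.exists(location):       # a local path (dir or file)
            return "directory" if os.path.isdir(location) else "file"
        if location.startswith("file:"):   # already a file: URL
            return "file-url"
        if urlparse(location).scheme:      # anything else with a scheme
            return "remote-url"
        return "ignored"                   # neither a path nor a URL

    print(classify("."))                        # directory
    print(classify("https://pypi.org/simple"))  # remote-url
    print(classify("no/such/path"))             # ignored
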
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/locations/__init__.py b/venv/lib/python3.9/site-packages/pip/_internal/locations/__init__.py
new file mode 100644
index 0000000..60afe0a
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/locations/__init__.py
@@ -0,0 +1,528 @@
+import functools
+import logging
+import os
+import pathlib
+import sys
+import sysconfig
+from typing import Any, Dict, Generator, List, Optional, Tuple
+
+from pip._internal.models.scheme import SCHEME_KEYS, Scheme
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.deprecation import deprecated
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+from . import _sysconfig
+from .base import (
+    USER_CACHE_DIR,
+    get_major_minor_version,
+    get_src_prefix,
+    is_osx_framework,
+    site_packages,
+    user_site,
+)
+
+__all__ = [
+    "USER_CACHE_DIR",
+    "get_bin_prefix",
+    "get_bin_user",
+    "get_major_minor_version",
+    "get_platlib",
+    "get_prefixed_libs",
+    "get_purelib",
+    "get_scheme",
+    "get_src_prefix",
+    "site_packages",
+    "user_site",
+]
+
+
+logger = logging.getLogger(__name__)
+
+
+_PLATLIBDIR: str = getattr(sys, "platlibdir", "lib")
+
+_USE_SYSCONFIG_DEFAULT = sys.version_info >= (3, 10)
+
+
+def _should_use_sysconfig() -> bool:
+    """This function determines the value of _USE_SYSCONFIG.
+
+    By default, pip uses sysconfig on Python 3.10+.
+    But Python distributors can override this decision by setting:
+        sysconfig._PIP_USE_SYSCONFIG = True / False
+    Rationale in https://github.com/pypa/pip/issues/10647
+
+    This is a function for testability, but should be constant during any one
+    run.
+    """
+    return bool(getattr(sysconfig, "_PIP_USE_SYSCONFIG", _USE_SYSCONFIG_DEFAULT))
+
+
+_USE_SYSCONFIG = _should_use_sysconfig()
+
+if not _USE_SYSCONFIG:
+    # Import distutils lazily to avoid deprecation warnings,
+    # but import it soon enough that it is in memory and available during
+    # a pip reinstall.
+    from . import _distutils
+
+# Be noisy about incompatibilities if this platform "should" be using
+# sysconfig, but is explicitly opting out and using distutils instead.
+if _USE_SYSCONFIG_DEFAULT and not _USE_SYSCONFIG:
+    _MISMATCH_LEVEL = logging.WARNING
+else:
+    _MISMATCH_LEVEL = logging.DEBUG
+
+
+def _looks_like_bpo_44860() -> bool:
+    """The resolution to bpo-44860 will change this incorrect platlib.
+
+    See <https://bugs.python.org/issue44860>.
+    """
+    from distutils.command.install import INSTALL_SCHEMES
+
+    try:
+        unix_user_platlib = INSTALL_SCHEMES["unix_user"]["platlib"]
+    except KeyError:
+        return False
+    return unix_user_platlib == "$usersite"
+
+
+def _looks_like_red_hat_patched_platlib_purelib(scheme: Dict[str, str]) -> bool:
+    platlib = scheme["platlib"]
+    if "/$platlibdir/" in platlib:
+        platlib = platlib.replace("/$platlibdir/", f"/{_PLATLIBDIR}/")
+    if "/lib64/" not in platlib:
+        return False
+    unpatched = platlib.replace("/lib64/", "/lib/")
+    return unpatched.replace("$platbase/", "$base/") == scheme["purelib"]
+
+
+@functools.lru_cache(maxsize=None)
+def _looks_like_red_hat_lib() -> bool:
+    """Red Hat patches platlib in unix_prefix and unix_home, but not purelib.
+
+    This is the only way I can see to tell a Red Hat-patched Python.
+    """
+    from distutils.command.install import INSTALL_SCHEMES
+
+    return all(
+        k in INSTALL_SCHEMES
+        and _looks_like_red_hat_patched_platlib_purelib(INSTALL_SCHEMES[k])
+        for k in ("unix_prefix", "unix_home")
+    )
+
+
+@functools.lru_cache(maxsize=None)
+def _looks_like_debian_scheme() -> bool:
+    """Debian adds two additional schemes."""
+    from distutils.command.install import INSTALL_SCHEMES
+
+    return "deb_system" in INSTALL_SCHEMES and "unix_local" in INSTALL_SCHEMES
+
+
+@functools.lru_cache(maxsize=None)
+def _looks_like_red_hat_scheme() -> bool:
+    """Red Hat patches ``sys.prefix`` and ``sys.exec_prefix``.
+
+    Red Hat's ``00251-change-user-install-location.patch`` changes the install
+    command's ``prefix`` and ``exec_prefix`` to append ``"/local"``. This is
+    (fortunately?) done quite unconditionally, so we create a default command
+    object without any configuration to detect this.
+    """
+    from distutils.command.install import install
+    from distutils.dist import Distribution
+
+    cmd: Any = install(Distribution())
+    cmd.finalize_options()
+    return (
+        cmd.exec_prefix == f"{os.path.normpath(sys.exec_prefix)}/local"
+        and cmd.prefix == f"{os.path.normpath(sys.prefix)}/local"
+    )
+
+
+@functools.lru_cache(maxsize=None)
+def _looks_like_slackware_scheme() -> bool:
+    """Slackware patches sysconfig but fails to patch distutils and site.
+
+    Slackware changes sysconfig's user scheme to use ``"lib64"`` for the lib
+    path, but does not do the same to the site module.
+    """
+    if user_site is None:  # User-site not available.
+        return False
+    try:
+        paths = sysconfig.get_paths(scheme="posix_user", expand=False)
+    except KeyError:  # User-site not available.
+        return False
+    return "/lib64/" in paths["purelib"] and "/lib64/" not in user_site
+
+
+@functools.lru_cache(maxsize=None)
+def _looks_like_msys2_mingw_scheme() -> bool:
+    """MSYS2 patches distutils and sysconfig to use a UNIX-like scheme.
+
+    However, MSYS2 incorrectly patches the sysconfig ``nt`` scheme. The fix is
+    likely going to be included in their 3.10 release, so we ignore the warning.
+    See msys2/MINGW-packages#9319.
+
+    MSYS2 MINGW's patch uses lowercase ``"lib"`` instead of the usual uppercase,
+    and is missing the final ``"site-packages"``.
+    """
+    paths = sysconfig.get_paths("nt", expand=False)
+    return all(
+        "Lib" not in p and "lib" in p and not p.endswith("site-packages")
+        for p in (paths[key] for key in ("platlib", "purelib"))
+    )
+
+
+def _fix_abiflags(parts: Tuple[str, ...]) -> Generator[str, None, None]:
+    ldversion = sysconfig.get_config_var("LDVERSION")
+    abiflags = getattr(sys, "abiflags", None)
+
+    # LDVERSION does not end with sys.abiflags. Just return the path unchanged.
+    if not ldversion or not abiflags or not ldversion.endswith(abiflags):
+        yield from parts
+        return
+
+    # Strip sys.abiflags from LDVERSION-based path components.
+    for part in parts:
+        if part.endswith(ldversion):
+            part = part[: (0 - len(abiflags))]
+        yield part
+
+
+@functools.lru_cache(maxsize=None)
+def _warn_mismatched(old: pathlib.Path, new: pathlib.Path, *, key: str) -> None:
+    issue_url = "https://github.com/pypa/pip/issues/10151"
+    message = (
+        "Value for %s does not match. Please report this to <%s>"
+        "\ndistutils: %s"
+        "\nsysconfig: %s"
+    )
+    logger.log(_MISMATCH_LEVEL, message, key, issue_url, old, new)
+
+
+def _warn_if_mismatch(old: pathlib.Path, new: pathlib.Path, *, key: str) -> bool:
+    if old == new:
+        return False
+    _warn_mismatched(old, new, key=key)
+    return True
+
+
+@functools.lru_cache(maxsize=None)
+def _log_context(
+    *,
+    user: bool = False,
+    home: Optional[str] = None,
+    root: Optional[str] = None,
+    prefix: Optional[str] = None,
+) -> None:
+    parts = [
+        "Additional context:",
+        "user = %r",
+        "home = %r",
+        "root = %r",
+        "prefix = %r",
+    ]
+
+    logger.log(_MISMATCH_LEVEL, "\n".join(parts), user, home, root, prefix)
+
+
+def get_scheme(
+    dist_name: str,
+    user: bool = False,
+    home: Optional[str] = None,
+    root: Optional[str] = None,
+    isolated: bool = False,
+    prefix: Optional[str] = None,
+) -> Scheme:
+    new = _sysconfig.get_scheme(
+        dist_name,
+        user=user,
+        home=home,
+        root=root,
+        isolated=isolated,
+        prefix=prefix,
+    )
+    if _USE_SYSCONFIG:
+        return new
+
+    old = _distutils.get_scheme(
+        dist_name,
+        user=user,
+        home=home,
+        root=root,
+        isolated=isolated,
+        prefix=prefix,
+    )
+
+    warning_contexts = []
+    for k in SCHEME_KEYS:
+        old_v = pathlib.Path(getattr(old, k))
+        new_v = pathlib.Path(getattr(new, k))
+
+        if old_v == new_v:
+            continue
+
+        # distutils incorrectly put PyPy packages under ``site-packages/python``
+        # in the ``posix_home`` scheme, but PyPy devs said they expect the
+        # directory name to be ``pypy`` instead. So we treat this as a bug fix
+        # and not warn about it. See bpo-43307 and python/cpython#24628.
+        skip_pypy_special_case = (
+            sys.implementation.name == "pypy"
+            and home is not None
+            and k in ("platlib", "purelib")
+            and old_v.parent == new_v.parent
+            and old_v.name.startswith("python")
+            and new_v.name.startswith("pypy")
+        )
+        if skip_pypy_special_case:
+            continue
+
+        # sysconfig's ``osx_framework_user`` does not include ``pythonX.Y`` in
+        # the ``include`` value, but distutils's ``headers`` does. We'll let
+        # CPython decide whether this is a bug or feature. See bpo-43948.
+        skip_osx_framework_user_special_case = (
+            user
+            and is_osx_framework()
+            and k == "headers"
+            and old_v.parent.parent == new_v.parent
+            and old_v.parent.name.startswith("python")
+        )
+        if skip_osx_framework_user_special_case:
+            continue
+
+        # On Red Hat and derived Linux distributions, distutils is patched to
+        # use "lib64" instead of "lib" for platlib.
+        if k == "platlib" and _looks_like_red_hat_lib():
+            continue
+
+        # On Python 3.9+, sysconfig's posix_user scheme sets platlib against
+        # sys.platlibdir, but distutils's unix_user incorrectly continues
+        # using the same $usersite for both platlib and purelib. This creates a
+        # mismatch when sys.platlibdir is not "lib".
+        skip_bpo_44860 = (
+            user
+            and k == "platlib"
+            and not WINDOWS
+            and sys.version_info >= (3, 9)
+            and _PLATLIBDIR != "lib"
+            and _looks_like_bpo_44860()
+        )
+        if skip_bpo_44860:
+            continue
+
+        # Slackware incorrectly patches posix_user to use lib64 instead of lib,
+        # but not usersite to match the location.
+        skip_slackware_user_scheme = (
+            user
+            and k in ("platlib", "purelib")
+            and not WINDOWS
+            and _looks_like_slackware_scheme()
+        )
+        if skip_slackware_user_scheme:
+            continue
+
+        # Both Debian and Red Hat patch Python to place the system site under
+        # /usr/local instead of /usr. Debian also places lib in dist-packages
+        # instead of site-packages, but the /usr/local check should cover it.
+        skip_linux_system_special_case = (
+            not (user or home or prefix or running_under_virtualenv())
+            and old_v.parts[1:3] == ("usr", "local")
+            and len(new_v.parts) > 1
+            and new_v.parts[1] == "usr"
+            and (len(new_v.parts) < 3 or new_v.parts[2] != "local")
+            and (_looks_like_red_hat_scheme() or _looks_like_debian_scheme())
+        )
+        if skip_linux_system_special_case:
+            continue
+
+        # On Python 3.7 and earlier, sysconfig does not include sys.abiflags in
+        # the "pythonX.Y" part of the path, but distutils does.
+        skip_sysconfig_abiflag_bug = (
+            sys.version_info < (3, 8)
+            and not WINDOWS
+            and k in ("headers", "platlib", "purelib")
+            and tuple(_fix_abiflags(old_v.parts)) == new_v.parts
+        )
+        if skip_sysconfig_abiflag_bug:
+            continue
+
+        # MSYS2 MINGW's sysconfig patch does not include the "site-packages"
+        # part of the path. This is incorrect and will be fixed in MSYS.
+        skip_msys2_mingw_bug = (
+            WINDOWS and k in ("platlib", "purelib") and _looks_like_msys2_mingw_scheme()
+        )
+        if skip_msys2_mingw_bug:
+            continue
+
+        # CPython's POSIX install script invokes pip (via ensurepip) against the
+        # interpreter located in the source tree, not the install site. This
+        # triggers special logic in sysconfig that's not present in distutils.
+        # https://github.com/python/cpython/blob/8c21941ddaf/Lib/sysconfig.py#L178-L194
+        skip_cpython_build = (
+            sysconfig.is_python_build(check_home=True)
+            and not WINDOWS
+            and k in ("headers", "include", "platinclude")
+        )
+        if skip_cpython_build:
+            continue
+
+        warning_contexts.append((old_v, new_v, f"scheme.{k}"))
+
+    if not warning_contexts:
+        return old
+
+    # Check if this path mismatch is caused by distutils config files. Those
+    # files will no longer work once we switch to sysconfig, so this raises a
+    # deprecation message for them.
+    default_old = _distutils.distutils_scheme(
+        dist_name,
+        user,
+        home,
+        root,
+        isolated,
+        prefix,
+        ignore_config_files=True,
+    )
+    if any(default_old[k] != getattr(old, k) for k in SCHEME_KEYS):
+        deprecated(
+            reason=(
+                "Configuring installation scheme with distutils config files "
+                "is deprecated and will no longer work in the near future. If you "
+                "are using a Homebrew or Linuxbrew Python, please see discussion "
+                "at https://github.com/Homebrew/homebrew-core/issues/76621"
+            ),
+            replacement=None,
+            gone_in=None,
+        )
+        return old
+
+    # Post warnings about this mismatch so user can report them back.
+    for old_v, new_v, key in warning_contexts:
+        _warn_mismatched(old_v, new_v, key=key)
+    _log_context(user=user, home=home, root=root, prefix=prefix)
+
+    return old
+
+
+def get_bin_prefix() -> str:
+    new = _sysconfig.get_bin_prefix()
+    if _USE_SYSCONFIG:
+        return new
+
+    old = _distutils.get_bin_prefix()
+    if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="bin_prefix"):
+        _log_context()
+    return old
+
+
+def get_bin_user() -> str:
+    return _sysconfig.get_scheme("", user=True).scripts
+
+
+def _looks_like_deb_system_dist_packages(value: str) -> bool:
+    """Check if the value is Debian's APT-controlled dist-packages.
+
+    Debian's ``distutils.sysconfig.get_python_lib()`` implementation returns the
+    default package path controlled by APT, but does not patch ``sysconfig`` to
+    do the same. This is similar to the bug worked around in ``get_scheme()``,
+    but here the default is ``deb_system`` instead of ``unix_local``. Ultimately
+    we can't do anything about this Debian bug, and this detection allows us to
+    skip the warning when needed.
+    """
+    if not _looks_like_debian_scheme():
+        return False
+    if value == "/usr/lib/python3/dist-packages":
+        return True
+    return False
+
+
+def get_purelib() -> str:
+    """Return the default pure-Python lib location."""
+    new = _sysconfig.get_purelib()
+    if _USE_SYSCONFIG:
+        return new
+
+    old = _distutils.get_purelib()
+    if _looks_like_deb_system_dist_packages(old):
+        return old
+    if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="purelib"):
+        _log_context()
+    return old
+
+
+def get_platlib() -> str:
+    """Return the default platform-shared lib location."""
+    new = _sysconfig.get_platlib()
+    if _USE_SYSCONFIG:
+        return new
+
+    from . import _distutils
+
+    old = _distutils.get_platlib()
+    if _looks_like_deb_system_dist_packages(old):
+        return old
+    if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="platlib"):
+        _log_context()
+    return old
+
+
+def _deduplicated(v1: str, v2: str) -> List[str]:
+    """Deduplicate values from a list."""
+    if v1 == v2:
+        return [v1]
+    return [v1, v2]
+
+
+def _looks_like_apple_library(path: str) -> bool:
+    """Apple patches sysconfig to *always* look under */Library/Python*."""
+    if sys.platform[:6] != "darwin":
+        return False
+    return path == f"/Library/Python/{get_major_minor_version()}/site-packages"
+
+
+def get_prefixed_libs(prefix: str) -> List[str]:
+    """Return the lib locations under ``prefix``."""
+    new_pure, new_plat = _sysconfig.get_prefixed_libs(prefix)
+    if _USE_SYSCONFIG:
+        return _deduplicated(new_pure, new_plat)
+
+    old_pure, old_plat = _distutils.get_prefixed_libs(prefix)
+    old_lib_paths = _deduplicated(old_pure, old_plat)
+
+    # Apple's Python (shipped with Xcode and Command Line Tools) hard-codes
+    # platlib and purelib to '/Library/Python/X.Y/site-packages'. This will
+    # cause serious build isolation bugs when Apple starts shipping 3.10 because
+    # pip will install build backends to the wrong location. This tells users
+    # who is at fault so Apple may notice it and fix the issue in time.
+    if all(_looks_like_apple_library(p) for p in old_lib_paths):
+        deprecated(
+            reason=(
+                "Python distributed by Apple's Command Line Tools incorrectly "
+                "patches sysconfig to always point to '/Library/Python'. This "
+                "will cause build isolation to operate incorrectly on Python "
+                "3.10 or later. Please help report this to Apple so they can "
+                "fix this. https://developer.apple.com/bug-reporting/"
+            ),
+            replacement=None,
+            gone_in=None,
+        )
+        return old_lib_paths
+
+    warned = [
+        _warn_if_mismatch(
+            pathlib.Path(old_pure),
+            pathlib.Path(new_pure),
+            key="prefixed-purelib",
+        ),
+        _warn_if_mismatch(
+            pathlib.Path(old_plat),
+            pathlib.Path(new_plat),
+            key="prefixed-platlib",
+        ),
+    ]
+    if any(warned):
+        _log_context(prefix=prefix)
+
+    return old_lib_paths
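
The mismatch handling above ultimately reduces to comparing two path dictionaries key by key and logging the differences. A minimal sketch of that comparison, applied to two sysconfig schemes instead of the distutils/sysconfig pair so it stays runnable on interpreters without distutils:

    # Sketch of the key-by-key comparison get_scheme() performs, applied
    # to two sysconfig schemes rather than the distutils/sysconfig pair.
    import pathlib
    import sysconfig

    def diff_schemes(a: str, b: str) -> None:
        pa = sysconfig.get_paths(scheme=a)
        pb = sysconfig.get_paths(scheme=b)
        for key in sorted(set(pa) & set(pb)):
            old, new = pathlib.Path(pa[key]), pathlib.Path(pb[key])
            if old != new:
                print(f"{key}:\n  {a}: {old}\n  {b}: {new}")

    names = set(sysconfig.get_scheme_names())
    if {"posix_prefix", "posix_user"} <= names:
        diff_schemes("posix_prefix", "posix_user")
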
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..7bdd3ad
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-39.pyc
new file mode 100644
index 0000000..a0dadb6
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-39.pyc
new file mode 100644
index 0000000..92ce117
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/locations/__pycache__/base.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/locations/__pycache__/base.cpython-39.pyc
new file mode 100644
index 0000000..2d6df74
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/locations/__pycache__/base.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/locations/_distutils.py b/venv/lib/python3.9/site-packages/pip/_internal/locations/_distutils.py
new file mode 100644
index 0000000..c7712f0
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/locations/_distutils.py
@@ -0,0 +1,180 @@
+"""Locations where we look for configs, install stuff, etc"""
+
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+# If pip's going to use distutils, it should not be using the copy that setuptools
+# might have injected into the environment. This is done by removing the injected
+# shim, if it's injected.
+#
+# See https://github.com/pypa/pip/issues/8761 for the original discussion and
+# rationale for why this is done within pip.
+try:
+    __import__("_distutils_hack").remove_shim()
+except (ImportError, AttributeError):
+    pass
+
+import logging
+import os
+import sys
+from distutils.cmd import Command as DistutilsCommand
+from distutils.command.install import SCHEME_KEYS
+from distutils.command.install import install as distutils_install_command
+from distutils.sysconfig import get_python_lib
+from typing import Dict, List, Optional, Tuple, Union, cast
+
+from pip._internal.models.scheme import Scheme
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+from .base import get_major_minor_version
+
+logger = logging.getLogger(__name__)
+
+
+def distutils_scheme(
+    dist_name: str,
+    user: bool = False,
+    home: Optional[str] = None,
+    root: Optional[str] = None,
+    isolated: bool = False,
+    prefix: Optional[str] = None,
+    *,
+    ignore_config_files: bool = False,
+) -> Dict[str, str]:
+    """
+    Return a distutils install scheme
+    """
+    from distutils.dist import Distribution
+
+    dist_args: Dict[str, Union[str, List[str]]] = {"name": dist_name}
+    if isolated:
+        dist_args["script_args"] = ["--no-user-cfg"]
+
+    d = Distribution(dist_args)
+    if not ignore_config_files:
+        try:
+            d.parse_config_files()
+        except UnicodeDecodeError:
+            # Typeshed does not include find_config_files() for some reason.
+            paths = d.find_config_files()  # type: ignore
+            logger.warning(
+                "Ignore distutils configs in %s due to encoding errors.",
+                ", ".join(os.path.basename(p) for p in paths),
+            )
+    obj: Optional[DistutilsCommand] = None
+    obj = d.get_command_obj("install", create=True)
+    assert obj is not None
+    i = cast(distutils_install_command, obj)
+    # NOTE: setting user or home has the side-effect of creating the home dir
+    # or user base for installations during finalize_options()
+    # ideally, we'd prefer a scheme class that has no side-effects.
+    assert not (user and prefix), f"user={user} prefix={prefix}"
+    assert not (home and prefix), f"home={home} prefix={prefix}"
+    i.user = user or i.user
+    if user or home:
+        i.prefix = ""
+    i.prefix = prefix or i.prefix
+    i.home = home or i.home
+    i.root = root or i.root
+    i.finalize_options()
+
+    scheme = {}
+    for key in SCHEME_KEYS:
+        scheme[key] = getattr(i, "install_" + key)
+
+    # install_lib specified in setup.cfg should install *everything*
+    # into there (i.e. it takes precedence over both purelib and
+    # platlib).  Note, i.install_lib is *always* set after
+    # finalize_options(); we only want to override here if the user
+    # has explicitly requested it, hence going back to the config.
+    if "install_lib" in d.get_option_dict("install"):
+        scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))
+
+    if running_under_virtualenv():
+        if home:
+            prefix = home
+        elif user:
+            prefix = i.install_userbase
+        else:
+            prefix = i.prefix
+        scheme["headers"] = os.path.join(
+            prefix,
+            "include",
+            "site",
+            f"python{get_major_minor_version()}",
+            dist_name,
+        )
+
+        if root is not None:
+            path_no_drive = os.path.splitdrive(os.path.abspath(scheme["headers"]))[1]
+            scheme["headers"] = os.path.join(root, path_no_drive[1:])
+
+    return scheme
+
+
+def get_scheme(
+    dist_name: str,
+    user: bool = False,
+    home: Optional[str] = None,
+    root: Optional[str] = None,
+    isolated: bool = False,
+    prefix: Optional[str] = None,
+) -> Scheme:
+    """
+    Get the "scheme" corresponding to the input parameters. The distutils
+    documentation provides the context for the available schemes:
+    https://docs.python.org/3/install/index.html#alternate-installation
+
+    :param dist_name: the name of the package to retrieve the scheme for, used
+        in the headers scheme path
+    :param user: indicates to use the "user" scheme
+    :param home: indicates to use the "home" scheme and provides the base
+        directory for the same
+    :param root: root under which other directories are re-based
+    :param isolated: equivalent to --no-user-cfg, i.e. do not consider
+        ~/.pydistutils.cfg (posix) or ~/pydistutils.cfg (non-posix) for
+        scheme paths
+    :param prefix: indicates to use the "prefix" scheme and provides the
+        base directory for the same
+    """
+    scheme = distutils_scheme(dist_name, user, home, root, isolated, prefix)
+    return Scheme(
+        platlib=scheme["platlib"],
+        purelib=scheme["purelib"],
+        headers=scheme["headers"],
+        scripts=scheme["scripts"],
+        data=scheme["data"],
+    )
+
+
+def get_bin_prefix() -> str:
+    # XXX: In old virtualenv versions, sys.prefix can contain '..' components,
+    # so we need to call normpath to eliminate them.
+    prefix = os.path.normpath(sys.prefix)
+    if WINDOWS:
+        bin_py = os.path.join(prefix, "Scripts")
+        # buildout uses 'bin' on Windows too?
+        if not os.path.exists(bin_py):
+            bin_py = os.path.join(prefix, "bin")
+        return bin_py
+    # Forcing to use /usr/local/bin for standard macOS framework installs
+    # Also log to ~/Library/Logs/ for use with the Console.app log viewer
+    if sys.platform[:6] == "darwin" and prefix[:16] == "/System/Library/":
+        return "/usr/local/bin"
+    return os.path.join(prefix, "bin")
+
+
+def get_purelib() -> str:
+    return get_python_lib(plat_specific=False)
+
+
+def get_platlib() -> str:
+    return get_python_lib(plat_specific=True)
+
+
+def get_prefixed_libs(prefix: str) -> Tuple[str, str]:
+    return (
+        get_python_lib(plat_specific=False, prefix=prefix),
+        get_python_lib(plat_specific=True, prefix=prefix),
+    )
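
The backend above leans on distutils's install command to compute the install_* attributes. A minimal sketch of that pattern (distutils was removed in Python 3.12, so this runs only on older interpreters such as the 3.9 this venv targets):

    # Sketch of the finalize_options() pattern behind distutils_scheme();
    # only runs where distutils is still importable (Python <= 3.11).
    from distutils.command.install import SCHEME_KEYS, install
    from distutils.dist import Distribution

    cmd = install(Distribution({"name": "example"}))
    cmd.finalize_options()  # computes install_purelib, install_scripts, ...
    for key in SCHEME_KEYS:
        print(key, "->", getattr(cmd, "install_" + key))
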
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/locations/_sysconfig.py b/venv/lib/python3.9/site-packages/pip/_internal/locations/_sysconfig.py
new file mode 100644
index 0000000..0bbc928
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/locations/_sysconfig.py
@@ -0,0 +1,218 @@
+import logging
+import os
+import sys
+import sysconfig
+import typing
+
+from pip._internal.exceptions import InvalidSchemeCombination, UserInstallationInvalid
+from pip._internal.models.scheme import SCHEME_KEYS, Scheme
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+from .base import change_root, get_major_minor_version, is_osx_framework
+
+logger = logging.getLogger(__name__)
+
+
+# Notes on _infer_* functions.
+# Unfortunately ``get_default_scheme()`` didn't exist before 3.10, so there's no
+# way to ask things like "what is the '_prefix' scheme on this platform". These
+# functions try to answer that with some heuristics while accounting for ad-hoc
+# platforms not covered by CPython's default sysconfig implementation. If the
+# ad-hoc implementation does not fully implement sysconfig, we'll fall back to
+# a POSIX scheme.
+
+_AVAILABLE_SCHEMES = set(sysconfig.get_scheme_names())
+
+_PREFERRED_SCHEME_API = getattr(sysconfig, "get_preferred_scheme", None)
+
+
+def _should_use_osx_framework_prefix() -> bool:
+    """Check for Apple's ``osx_framework_library`` scheme.
+
+    Python distributed by Apple's Command Line Tools has this special scheme
+    that's used when:
+
+    * This is a framework build.
+    * We are installing into the system prefix.
+
+    This does not account for ``pip install --prefix`` (which also means we're not
+    installing to the system prefix), which should use ``posix_prefix``, but
+    logic here means ``_infer_prefix()`` outputs ``osx_framework_library``. But
+    since ``prefix`` is not available for ``sysconfig.get_default_scheme()``,
+    which is the stdlib replacement for ``_infer_prefix()``, presumably Apple
+    wouldn't be able to magically switch between ``osx_framework_library`` and
+    ``posix_prefix``. ``_infer_prefix()`` returning ``osx_framework_library``
+    means its behavior is consistent whether we use the stdlib implementation
+    or our own, and we deal with this special case in ``get_scheme()`` instead.
+    """
+    return (
+        "osx_framework_library" in _AVAILABLE_SCHEMES
+        and not running_under_virtualenv()
+        and is_osx_framework()
+    )
+
+
+def _infer_prefix() -> str:
+    """Try to find a prefix scheme for the current platform.
+
+    This tries:
+
+    * A special ``osx_framework_library`` for Python distributed by Apple's
+      Command Line Tools, when not running in a virtual environment.
+    * Implementation + OS, used by PyPy on Windows (``pypy_nt``).
+    * Implementation without OS, used by PyPy on POSIX (``pypy``).
+    * OS + "prefix", used by CPython on POSIX (``posix_prefix``).
+    * Just the OS name, used by CPython on Windows (``nt``).
+
+    If none of the above works, fall back to ``posix_prefix``.
+    """
+    if _PREFERRED_SCHEME_API:
+        return _PREFERRED_SCHEME_API("prefix")
+    if _should_use_osx_framework_prefix():
+        return "osx_framework_library"
+    implementation_suffixed = f"{sys.implementation.name}_{os.name}"
+    if implementation_suffixed in _AVAILABLE_SCHEMES:
+        return implementation_suffixed
+    if sys.implementation.name in _AVAILABLE_SCHEMES:
+        return sys.implementation.name
+    suffixed = f"{os.name}_prefix"
+    if suffixed in _AVAILABLE_SCHEMES:
+        return suffixed
+    if os.name in _AVAILABLE_SCHEMES:  # On Windows, prefix is just called "nt".
+        return os.name
+    return "posix_prefix"
+
+
+def _infer_user() -> str:
+    """Try to find a user scheme for the current platform."""
+    if _PREFERRED_SCHEME_API:
+        return _PREFERRED_SCHEME_API("user")
+    if is_osx_framework() and not running_under_virtualenv():
+        suffixed = "osx_framework_user"
+    else:
+        suffixed = f"{os.name}_user"
+    if suffixed in _AVAILABLE_SCHEMES:
+        return suffixed
+    if "posix_user" not in _AVAILABLE_SCHEMES:  # User scheme unavailable.
+        raise UserInstallationInvalid()
+    return "posix_user"
+
+
+def _infer_home() -> str:
+    """Try to find a home for the current platform."""
+    if _PREFERRED_SCHEME_API:
+        return _PREFERRED_SCHEME_API("home")
+    suffixed = f"{os.name}_home"
+    if suffixed in _AVAILABLE_SCHEMES:
+        return suffixed
+    return "posix_home"
+
+
+# Update these keys if the user sets a custom home.
+_HOME_KEYS = [
+    "installed_base",
+    "base",
+    "installed_platbase",
+    "platbase",
+    "prefix",
+    "exec_prefix",
+]
+if sysconfig.get_config_var("userbase") is not None:
+    _HOME_KEYS.append("userbase")
+
+
+def get_scheme(
+    dist_name: str,
+    user: bool = False,
+    home: typing.Optional[str] = None,
+    root: typing.Optional[str] = None,
+    isolated: bool = False,
+    prefix: typing.Optional[str] = None,
+) -> Scheme:
+    """
+    Get the "scheme" corresponding to the input parameters.
+
+    :param dist_name: the name of the package to retrieve the scheme for, used
+        in the headers scheme path
+    :param user: indicates to use the "user" scheme
+    :param home: indicates to use the "home" scheme
+    :param root: root under which other directories are re-based
+    :param isolated: ignored, but kept for distutils compatibility (where
+        this controls whether the user-site pydistutils.cfg is honored)
+    :param prefix: indicates to use the "prefix" scheme and provides the
+        base directory for the same
+    """
+    if user and prefix:
+        raise InvalidSchemeCombination("--user", "--prefix")
+    if home and prefix:
+        raise InvalidSchemeCombination("--home", "--prefix")
+
+    if home is not None:
+        scheme_name = _infer_home()
+    elif user:
+        scheme_name = _infer_user()
+    else:
+        scheme_name = _infer_prefix()
+
+    # Special case: When installing into a custom prefix, use posix_prefix
+    # instead of osx_framework_library. See _should_use_osx_framework_prefix()
+    # docstring for details.
+    if prefix is not None and scheme_name == "osx_framework_library":
+        scheme_name = "posix_prefix"
+
+    if home is not None:
+        variables = {k: home for k in _HOME_KEYS}
+    elif prefix is not None:
+        variables = {k: prefix for k in _HOME_KEYS}
+    else:
+        variables = {}
+
+    paths = sysconfig.get_paths(scheme=scheme_name, vars=variables)
+
+    # Logic here is very arbitrary; we're doing it for compatibility, don't ask.
+    # 1. Pip historically uses a special header path in virtual environments.
+    # 2. If the distribution name is not known, distutils uses 'UNKNOWN'. We
+    #    only do the same when not running in a virtual environment because
+    #    pip's historical header path logic (see point 1) did not do this.
+    if running_under_virtualenv():
+        if user:
+            base = variables.get("userbase", sys.prefix)
+        else:
+            base = variables.get("base", sys.prefix)
+        python_xy = f"python{get_major_minor_version()}"
+        paths["include"] = os.path.join(base, "include", "site", python_xy)
+    elif not dist_name:
+        dist_name = "UNKNOWN"
+
+    scheme = Scheme(
+        platlib=paths["platlib"],
+        purelib=paths["purelib"],
+        headers=os.path.join(paths["include"], dist_name),
+        scripts=paths["scripts"],
+        data=paths["data"],
+    )
+    if root is not None:
+        for key in SCHEME_KEYS:
+            value = change_root(root, getattr(scheme, key))
+            setattr(scheme, key, value)
+    return scheme
+
+
+def get_bin_prefix() -> str:
+    # Forcing to use /usr/local/bin for standard macOS framework installs.
+    if sys.platform[:6] == "darwin" and sys.prefix[:16] == "/System/Library/":
+        return "/usr/local/bin"
+    return sysconfig.get_paths()["scripts"]
+
+
+def get_purelib() -> str:
+    return sysconfig.get_paths()["purelib"]
+
+
+def get_platlib() -> str:
+    return sysconfig.get_paths()["platlib"]
+
+
+def get_prefixed_libs(prefix: str) -> typing.Tuple[str, str]:
+    paths = sysconfig.get_paths(vars={"base": prefix, "platbase": prefix})
+    return (paths["purelib"], paths["platlib"])
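
The _infer_* helpers above just probe sysconfig.get_scheme_names() in a fixed preference order. A compressed sketch of the prefix inference, with the macOS framework special case omitted:

    # Compressed sketch of _infer_prefix()'s candidate probing; the
    # osx_framework_library special case is left out for brevity.
    import os
    import sys
    import sysconfig

    available = set(sysconfig.get_scheme_names())
    candidates = [
        f"{sys.implementation.name}_{os.name}",  # e.g. "pypy_nt"
        sys.implementation.name,                 # e.g. "pypy"
        f"{os.name}_prefix",                     # e.g. "posix_prefix"
        os.name,                                 # e.g. "nt"
    ]
    scheme = next((c for c in candidates if c in available), "posix_prefix")
    print("inferred prefix scheme:", scheme)
    print("purelib:", sysconfig.get_paths(scheme=scheme)["purelib"])
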
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/locations/base.py b/venv/lib/python3.9/site-packages/pip/_internal/locations/base.py
new file mode 100644
index 0000000..3f7de00
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/locations/base.py
@@ -0,0 +1,81 @@
+import functools
+import os
+import site
+import sys
+import sysconfig
+import typing
+
+from pip._internal.exceptions import InstallationError
+from pip._internal.utils import appdirs
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+# Application Directories
+USER_CACHE_DIR = appdirs.user_cache_dir("pip")
+
+# FIXME doesn't account for venv linked to global site-packages
+site_packages: typing.Optional[str] = sysconfig.get_path("purelib")
+
+
+def get_major_minor_version() -> str:
+    """
+    Return the major-minor version of the current Python as a string, e.g.
+    "3.7" or "3.10".
+    """
+    return "{}.{}".format(*sys.version_info)
+
+
+def change_root(new_root: str, pathname: str) -> str:
+    """Return 'pathname' with 'new_root' prepended.
+
+    If 'pathname' is relative, this is equivalent to os.path.join(new_root, pathname).
+    Otherwise, it requires making 'pathname' relative and then joining the
+    two, which is tricky on DOS/Windows and Mac OS.
+
+    This is borrowed from Python's standard library's distutils module.
+    """
+    if os.name == "posix":
+        if not os.path.isabs(pathname):
+            return os.path.join(new_root, pathname)
+        else:
+            return os.path.join(new_root, pathname[1:])
+
+    elif os.name == "nt":
+        (drive, path) = os.path.splitdrive(pathname)
+        if path[0] == "\\":
+            path = path[1:]
+        return os.path.join(new_root, path)
+
+    else:
+        raise InstallationError(
+            f"Unknown platform: {os.name}\n"
+            "Can not change root path prefix on unknown platform."
+        )
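+
+# A quick illustration of the POSIX branch above (hypothetical paths):
+#
+#   change_root("/alt", "/usr/lib/demo")  # -> "/alt/usr/lib/demo"
+#   change_root("/alt", "local/demo")     # -> "/alt/local/demo"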
+
+
+def get_src_prefix() -> str:
+    if running_under_virtualenv():
+        src_prefix = os.path.join(sys.prefix, "src")
+    else:
+        # FIXME: keep src in cwd for now (it is not a temporary folder)
+        try:
+            src_prefix = os.path.join(os.getcwd(), "src")
+        except OSError:
+            # In case the current working directory has been renamed or deleted
+            sys.exit("The folder you are executing pip from can no longer be found.")
+
+    # under macOS + virtualenv sys.prefix is not properly resolved
+    # it is something like /path/to/python/bin/..
+    return os.path.abspath(src_prefix)
+
+
+try:
+    # Use getusersitepackages if this is present, as it ensures that the
+    # value is initialised properly.
+    user_site: typing.Optional[str] = site.getusersitepackages()
+except AttributeError:
+    user_site = site.USER_SITE
+
+
+@functools.lru_cache(maxsize=None)
+def is_osx_framework() -> bool:
+    return bool(sysconfig.get_config_var("PYTHONFRAMEWORK"))
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/main.py b/venv/lib/python3.9/site-packages/pip/_internal/main.py
new file mode 100644
index 0000000..33c6d24
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/main.py
@@ -0,0 +1,12 @@
+from typing import List, Optional
+
+
+def main(args: Optional[List[str]] = None) -> int:
+    """This is preserved for old console scripts that may still be referencing
+    it.
+
+    For additional details, see https://github.com/pypa/pip/issues/7498.
+    """
+    from pip._internal.utils.entrypoints import _wrapper
+
+    return _wrapper(args)
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/metadata/__init__.py b/venv/lib/python3.9/site-packages/pip/_internal/metadata/__init__.py
new file mode 100644
index 0000000..9f73ca7
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/metadata/__init__.py
@@ -0,0 +1,127 @@
+import contextlib
+import functools
+import os
+import sys
+from typing import TYPE_CHECKING, List, Optional, Type, cast
+
+from pip._internal.utils.misc import strtobool
+
+from .base import BaseDistribution, BaseEnvironment, FilesystemWheel, MemoryWheel, Wheel
+
+if TYPE_CHECKING:
+    from typing import Protocol
+else:
+    Protocol = object
+
+__all__ = [
+    "BaseDistribution",
+    "BaseEnvironment",
+    "FilesystemWheel",
+    "MemoryWheel",
+    "Wheel",
+    "get_default_environment",
+    "get_environment",
+    "get_wheel_distribution",
+    "select_backend",
+]
+
+
+def _should_use_importlib_metadata() -> bool:
+    """Whether to use the ``importlib.metadata`` or ``pkg_resources`` backend.
+
+    By default, pip uses ``importlib.metadata`` on Python 3.11+, and
+    ``pkg_resources`` otherwise. This can be overridden in a couple of ways:
+
+    * If environment variable ``_PIP_USE_IMPORTLIB_METADATA`` is set, it
+      dictates whether ``importlib.metadata`` is used, regardless of Python
+      version.
+    * On Python 3.11+, Python distributors can patch ``importlib.metadata``
+      to add a global constant ``_PIP_USE_IMPORTLIB_METADATA = False``. This
+      makes pip use ``pkg_resources`` (unless the user set the aforementioned
+      environment variable to *True*).
+    """
+    with contextlib.suppress(KeyError, ValueError):
+        return bool(strtobool(os.environ["_PIP_USE_IMPORTLIB_METADATA"]))
+    if sys.version_info < (3, 11):
+        return False
+    import importlib.metadata
+
+    return bool(getattr(importlib.metadata, "_PIP_USE_IMPORTLIB_METADATA", True))
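+
+# The environment-variable override described above can be exercised from a
+# shell; illustrative invocations (any value accepted by strtobool works):
+#
+#   _PIP_USE_IMPORTLIB_METADATA=1 python -m pip list  # force importlib.metadata
+#   _PIP_USE_IMPORTLIB_METADATA=0 python -m pip list  # force pkg_resources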
+
+
+class Backend(Protocol):
+    Distribution: Type[BaseDistribution]
+    Environment: Type[BaseEnvironment]
+
+
+@functools.lru_cache(maxsize=None)
+def select_backend() -> Backend:
+    if _should_use_importlib_metadata():
+        from . import importlib
+
+        return cast(Backend, importlib)
+    from . import pkg_resources
+
+    return cast(Backend, pkg_resources)
+
+
+def get_default_environment() -> BaseEnvironment:
+    """Get the default representation for the current environment.
+
+    This returns an Environment instance from the chosen backend. The default
+    Environment instance should be built from ``sys.path`` and may use caching
+    to share instance state across calls.
+    """
+    return select_backend().Environment.default()
+
+
+def get_environment(paths: Optional[List[str]]) -> BaseEnvironment:
+    """Get a representation of the environment specified by ``paths``.
+
+    This returns an Environment instance from the chosen backend based on the
+    given import paths. The backend must build a fresh instance representing
+    the state of installed distributions when this function is called.
+    """
+    return select_backend().Environment.from_paths(paths)
+
+
+def get_directory_distribution(directory: str) -> BaseDistribution:
+    """Get the distribution metadata representation in the specified directory.
+
+    This returns a Distribution instance from the chosen backend based on
+    the given on-disk ``.dist-info`` directory.
+    """
+    return select_backend().Distribution.from_directory(directory)
+
+
+def get_wheel_distribution(wheel: Wheel, canonical_name: str) -> BaseDistribution:
+    """Get the representation of the specified wheel's distribution metadata.
+
+    This returns a Distribution instance from the chosen backend based on
+    the given wheel's ``.dist-info`` directory.
+
+    :param canonical_name: Normalized project name of the given wheel.
+    """
+    return select_backend().Distribution.from_wheel(wheel, canonical_name)
+
+
+def get_metadata_distribution(
+    metadata_contents: bytes,
+    filename: str,
+    canonical_name: str,
+) -> BaseDistribution:
+    """Get the dist representation of the specified METADATA file contents.
+
+    This returns a Distribution instance from the chosen backend sourced from the data
+    in `metadata_contents`.
+
+    :param metadata_contents: Contents of a METADATA file within a dist, or one served
+                              via PEP 658.
+    :param filename: Filename for the dist this metadata represents.
+    :param canonical_name: Normalized project name of the given dist.
+    """
+    return select_backend().Distribution.from_metadata_file_contents(
+        metadata_contents,
+        filename,
+        canonical_name,
+    )
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..6fba955
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/metadata/__pycache__/_json.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/metadata/__pycache__/_json.cpython-39.pyc
new file mode 100644
index 0000000..98cf1aa
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/metadata/__pycache__/_json.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/metadata/__pycache__/base.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/metadata/__pycache__/base.cpython-39.pyc
new file mode 100644
index 0000000..4460cf0
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/metadata/__pycache__/base.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-39.pyc
new file mode 100644
index 0000000..de22ff1
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/metadata/_json.py b/venv/lib/python3.9/site-packages/pip/_internal/metadata/_json.py
new file mode 100644
index 0000000..336b52f
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/metadata/_json.py
@@ -0,0 +1,84 @@
+# Extracted from https://github.com/pfmoore/pkg_metadata
+
+from email.header import Header, decode_header, make_header
+from email.message import Message
+from typing import Any, Dict, List, Union
+
+METADATA_FIELDS = [
+    # Name, Multiple-Use
+    ("Metadata-Version", False),
+    ("Name", False),
+    ("Version", False),
+    ("Dynamic", True),
+    ("Platform", True),
+    ("Supported-Platform", True),
+    ("Summary", False),
+    ("Description", False),
+    ("Description-Content-Type", False),
+    ("Keywords", False),
+    ("Home-page", False),
+    ("Download-URL", False),
+    ("Author", False),
+    ("Author-email", False),
+    ("Maintainer", False),
+    ("Maintainer-email", False),
+    ("License", False),
+    ("Classifier", True),
+    ("Requires-Dist", True),
+    ("Requires-Python", False),
+    ("Requires-External", True),
+    ("Project-URL", True),
+    ("Provides-Extra", True),
+    ("Provides-Dist", True),
+    ("Obsoletes-Dist", True),
+]
+
+
+def json_name(field: str) -> str:
+    return field.lower().replace("-", "_")
+
+
+def msg_to_json(msg: Message) -> Dict[str, Any]:
+    """Convert a Message object into a JSON-compatible dictionary."""
+
+    def sanitise_header(h: Union[Header, str]) -> str:
+        if isinstance(h, Header):
+            chunks = []
+            for bytes, encoding in decode_header(h):
+                if encoding == "unknown-8bit":
+                    try:
+                        # See if UTF-8 works
+                        bytes.decode("utf-8")
+                        encoding = "utf-8"
+                    except UnicodeDecodeError:
+                        # If not, latin1 at least won't fail
+                        encoding = "latin1"
+                chunks.append((bytes, encoding))
+            return str(make_header(chunks))
+        return str(h)
+
+    result = {}
+    for field, multi in METADATA_FIELDS:
+        if field not in msg:
+            continue
+        key = json_name(field)
+        if multi:
+            value: Union[str, List[str]] = [
+                sanitise_header(v) for v in msg.get_all(field)
+            ]
+        else:
+            value = sanitise_header(msg.get(field))
+            if key == "keywords":
+                # Accept both comma-separated and space-separated
+                # forms, for better compatibility with old data.
+                if "," in value:
+                    value = [v.strip() for v in value.split(",")]
+                else:
+                    value = value.split()
+        result[key] = value
+
+    payload = msg.get_payload()
+    if payload:
+        result["description"] = payload
+
+    return result
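+
+# Minimal illustrative round-trip (not part of pip; values are made up):
+#
+#   import email.parser
+#   msg = email.parser.Parser().parsestr(
+#       "Metadata-Version: 2.1\nName: demo\nKeywords: alpha,beta\n"
+#   )
+#   msg_to_json(msg)
+#   # -> {'metadata_version': '2.1', 'name': 'demo',
+#   #     'keywords': ['alpha', 'beta']}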
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/metadata/base.py b/venv/lib/python3.9/site-packages/pip/_internal/metadata/base.py
new file mode 100644
index 0000000..cafb79f
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/metadata/base.py
@@ -0,0 +1,688 @@
+import csv
+import email.message
+import functools
+import json
+import logging
+import pathlib
+import re
+import zipfile
+from typing import (
+    IO,
+    TYPE_CHECKING,
+    Any,
+    Collection,
+    Container,
+    Dict,
+    Iterable,
+    Iterator,
+    List,
+    NamedTuple,
+    Optional,
+    Tuple,
+    Union,
+)
+
+from pip._vendor.packaging.requirements import Requirement
+from pip._vendor.packaging.specifiers import InvalidSpecifier, SpecifierSet
+from pip._vendor.packaging.utils import NormalizedName
+from pip._vendor.packaging.version import LegacyVersion, Version
+
+from pip._internal.exceptions import NoneMetadataError
+from pip._internal.locations import site_packages, user_site
+from pip._internal.models.direct_url import (
+    DIRECT_URL_METADATA_NAME,
+    DirectUrl,
+    DirectUrlValidationError,
+)
+from pip._internal.utils.compat import stdlib_pkgs  # TODO: Move definition here.
+from pip._internal.utils.egg_link import egg_link_path_from_sys_path
+from pip._internal.utils.misc import is_local, normalize_path
+from pip._internal.utils.packaging import safe_extra
+from pip._internal.utils.urls import url_to_path
+
+from ._json import msg_to_json
+
+if TYPE_CHECKING:
+    from typing import Protocol
+else:
+    Protocol = object
+
+DistributionVersion = Union[LegacyVersion, Version]
+
+InfoPath = Union[str, pathlib.PurePath]
+
+logger = logging.getLogger(__name__)
+
+
+class BaseEntryPoint(Protocol):
+    @property
+    def name(self) -> str:
+        raise NotImplementedError()
+
+    @property
+    def value(self) -> str:
+        raise NotImplementedError()
+
+    @property
+    def group(self) -> str:
+        raise NotImplementedError()
+
+
+def _convert_installed_files_path(
+    entry: Tuple[str, ...],
+    info: Tuple[str, ...],
+) -> str:
+    """Convert a legacy installed-files.txt path into modern RECORD path.
+
+    The legacy format stores paths relative to the info directory, while the
+    modern format stores paths relative to the package root, e.g. the
+    site-packages directory.
+
+    :param entry: Path parts of the installed-files.txt entry.
+    :param info: Path parts of the egg-info directory relative to package root.
+    :returns: The converted entry.
+
+    For best compatibility with symlinks, this does not use ``abspath()`` or
+    ``Path.resolve()``, but tries to work with path parts:
+
+    1. While ``entry`` starts with ``..``, remove an equal number of parts
+       from ``info``; if ``info`` is empty, start appending ``..`` instead.
+    2. Join the two directly.
+    """
+    while entry and entry[0] == "..":
+        if not info or info[-1] == "..":
+            info += ("..",)
+        else:
+            info = info[:-1]
+        entry = entry[1:]
+    return str(pathlib.Path(*info, *entry))
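+
+# Worked example of the conversion above (hypothetical names): an
+# installed-files.txt entry relative to "demo.egg-info" becomes a
+# RECORD-style path relative to the package root:
+#
+#   _convert_installed_files_path(("..", "demo.py"), ("demo.egg-info",))
+#   # -> "demo.py"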
+
+
+class RequiresEntry(NamedTuple):
+    requirement: str
+    extra: str
+    marker: str
+
+
+class BaseDistribution(Protocol):
+    @classmethod
+    def from_directory(cls, directory: str) -> "BaseDistribution":
+        """Load the distribution from a metadata directory.
+
+        :param directory: Path to a metadata directory, e.g. ``.dist-info``.
+        """
+        raise NotImplementedError()
+
+    @classmethod
+    def from_metadata_file_contents(
+        cls,
+        metadata_contents: bytes,
+        filename: str,
+        project_name: str,
+    ) -> "BaseDistribution":
+        """Load the distribution from the contents of a METADATA file.
+
+        This is used to implement PEP 658 by generating a "shallow" dist object that can
+        be used for resolution without downloading or building the actual dist yet.
+
+        :param metadata_contents: The contents of a METADATA file.
+        :param filename: File name for the dist with this metadata.
+        :param project_name: Name of the project this dist represents.
+        """
+        raise NotImplementedError()
+
+    @classmethod
+    def from_wheel(cls, wheel: "Wheel", name: str) -> "BaseDistribution":
+        """Load the distribution from a given wheel.
+
+        :param wheel: A concrete wheel definition.
+        :param name: File name of the wheel.
+
+        :raises InvalidWheel: Whenever loading of the wheel causes a
+            :py:exc:`zipfile.BadZipFile` exception to be thrown.
+        :raises UnsupportedWheel: If the wheel is a valid zip, but malformed
+            internally.
+        """
+        raise NotImplementedError()
+
+    def __repr__(self) -> str:
+        return f"{self.raw_name} {self.version} ({self.location})"
+
+    def __str__(self) -> str:
+        return f"{self.raw_name} {self.version}"
+
+    @property
+    def location(self) -> Optional[str]:
+        """Where the distribution is loaded from.
+
+        A string value is not necessarily a filesystem path, since distributions
+        can be loaded from other sources, e.g. arbitrary zip archives. ``None``
+        means the distribution is created in-memory.
+
+        Do not canonicalize this value with e.g. ``pathlib.Path.resolve()``. If
+        this is a symbolic link, we want to preserve the relative path between
+        it and files in the distribution.
+        """
+        raise NotImplementedError()
+
+    @property
+    def editable_project_location(self) -> Optional[str]:
+        """The project location for editable distributions.
+
+        This is the directory where pyproject.toml or setup.py is located.
+        None if the distribution is not installed in editable mode.
+        """
+        # TODO: this property is relatively costly to compute; memoize it?
+        direct_url = self.direct_url
+        if direct_url:
+            if direct_url.is_local_editable():
+                return url_to_path(direct_url.url)
+        else:
+            # Search for an .egg-link file by walking sys.path, as it was
+            # done before by dist_is_editable().
+            egg_link_path = egg_link_path_from_sys_path(self.raw_name)
+            if egg_link_path:
+                # TODO: get project location from second line of egg_link file
+                #       (https://github.com/pypa/pip/issues/10243)
+                return self.location
+        return None
+
+    @property
+    def installed_location(self) -> Optional[str]:
+        """The distribution's "installed" location.
+
+        This should generally be a ``site-packages`` directory. This is
+        usually ``dist.location``, except for legacy develop-installed packages,
+        where ``dist.location`` is the source code location, and this is where
+        the ``.egg-link`` file is.
+
+        The returned location is normalized (in particular, with symlinks removed).
+        """
+        raise NotImplementedError()
+
+    @property
+    def info_location(self) -> Optional[str]:
+        """Location of the .[egg|dist]-info directory or file.
+
+        Similarly to ``location``, a string value is not necessarily a
+        filesystem path. ``None`` means the distribution is created in-memory.
+
+        For a modern .dist-info installation on disk, this should be something
+        like ``{location}/{raw_name}-{version}.dist-info``.
+
+        Do not canonicalize this value with e.g. ``pathlib.Path.resolve()``. If
+        this is a symbolic link, we want to preserve the relative path between
+        it and other files in the distribution.
+        """
+        raise NotImplementedError()
+
+    @property
+    def installed_by_distutils(self) -> bool:
+        """Whether this distribution is installed with legacy distutils format.
+
+        A distribution installed with "raw" distutils not patched by setuptools
+        uses one single file at ``info_location`` to store metadata. We need to
+        treat this specially on uninstallation.
+        """
+        info_location = self.info_location
+        if not info_location:
+            return False
+        return pathlib.Path(info_location).is_file()
+
+    @property
+    def installed_as_egg(self) -> bool:
+        """Whether this distribution is installed as an egg.
+
+        This usually indicates the distribution was installed by (older versions
+        of) easy_install.
+        """
+        location = self.location
+        if not location:
+            return False
+        return location.endswith(".egg")
+
+    @property
+    def installed_with_setuptools_egg_info(self) -> bool:
+        """Whether this distribution is installed with the ``.egg-info`` format.
+
+        This usually indicates the distribution was installed with setuptools
+        with an old pip version or with ``single-version-externally-managed``.
+
+        Note that this ensures the metadata store is a directory. distutils can
+        also install an ``.egg-info``, but as a file, not a directory. This
+        property is *False* for that case. Also see ``installed_by_distutils``.
+        """
+        info_location = self.info_location
+        if not info_location:
+            return False
+        if not info_location.endswith(".egg-info"):
+            return False
+        return pathlib.Path(info_location).is_dir()
+
+    @property
+    def installed_with_dist_info(self) -> bool:
+        """Whether this distribution is installed with the "modern format".
+
+        This indicates a "modern" installation, e.g. storing metadata in the
+        ``.dist-info`` directory. This applies to installations made by
+        setuptools (but through pip, not directly), or anything using the
+        standardized build backend interface (PEP 517).
+        """
+        info_location = self.info_location
+        if not info_location:
+            return False
+        if not info_location.endswith(".dist-info"):
+            return False
+        return pathlib.Path(info_location).is_dir()
+
+    @property
+    def canonical_name(self) -> NormalizedName:
+        raise NotImplementedError()
+
+    @property
+    def version(self) -> DistributionVersion:
+        raise NotImplementedError()
+
+    @property
+    def setuptools_filename(self) -> str:
+        """Convert a project name to its setuptools-compatible filename.
+
+        This is a copy of ``pkg_resources.to_filename()`` for compatibility.
+        """
+        return self.raw_name.replace("-", "_")
+
+    @property
+    def direct_url(self) -> Optional[DirectUrl]:
+        """Obtain a DirectUrl from this distribution.
+
+        Returns None if the distribution has no `direct_url.json` metadata,
+        or if `direct_url.json` is invalid.
+        """
+        try:
+            content = self.read_text(DIRECT_URL_METADATA_NAME)
+        except FileNotFoundError:
+            return None
+        try:
+            return DirectUrl.from_json(content)
+        except (
+            UnicodeDecodeError,
+            json.JSONDecodeError,
+            DirectUrlValidationError,
+        ) as e:
+            logger.warning(
+                "Error parsing %s for %s: %s",
+                DIRECT_URL_METADATA_NAME,
+                self.canonical_name,
+                e,
+            )
+            return None
+
+    @property
+    def installer(self) -> str:
+        try:
+            installer_text = self.read_text("INSTALLER")
+        except (OSError, ValueError, NoneMetadataError):
+            return ""  # Fail silently if the installer file cannot be read.
+        for line in installer_text.splitlines():
+            cleaned_line = line.strip()
+            if cleaned_line:
+                return cleaned_line
+        return ""
+
+    @property
+    def requested(self) -> bool:
+        return self.is_file("REQUESTED")
+
+    @property
+    def editable(self) -> bool:
+        return bool(self.editable_project_location)
+
+    @property
+    def local(self) -> bool:
+        """If distribution is installed in the current virtual environment.
+
+        Always True if we're not in a virtualenv.
+        """
+        if self.installed_location is None:
+            return False
+        return is_local(self.installed_location)
+
+    @property
+    def in_usersite(self) -> bool:
+        if self.installed_location is None or user_site is None:
+            return False
+        return self.installed_location.startswith(normalize_path(user_site))
+
+    @property
+    def in_site_packages(self) -> bool:
+        if self.installed_location is None or site_packages is None:
+            return False
+        return self.installed_location.startswith(normalize_path(site_packages))
+
+    def is_file(self, path: InfoPath) -> bool:
+        """Check whether an entry in the info directory is a file."""
+        raise NotImplementedError()
+
+    def iter_distutils_script_names(self) -> Iterator[str]:
+        """Find distutils 'scripts' entries metadata.
+
+        If 'scripts' is supplied in ``setup.py``, distutils records those in the
+        installed distribution's ``scripts`` directory, a file for each script.
+        """
+        raise NotImplementedError()
+
+    def read_text(self, path: InfoPath) -> str:
+        """Read a file in the info directory.
+
+        :raise FileNotFoundError: If ``path`` does not exist in the directory.
+        :raise NoneMetadataError: If ``path`` exists in the info directory, but
+            cannot be read.
+        """
+        raise NotImplementedError()
+
+    def iter_entry_points(self) -> Iterable[BaseEntryPoint]:
+        raise NotImplementedError()
+
+    def _metadata_impl(self) -> email.message.Message:
+        raise NotImplementedError()
+
+    @functools.lru_cache(maxsize=1)
+    def _metadata_cached(self) -> email.message.Message:
+        # When we drop python 3.7 support, move this to the metadata property and use
+        # functools.cached_property instead of lru_cache.
+        metadata = self._metadata_impl()
+        self._add_egg_info_requires(metadata)
+        return metadata
+
+    @property
+    def metadata(self) -> email.message.Message:
+        """Metadata of distribution parsed from e.g. METADATA or PKG-INFO.
+
+        This should return an empty message if the metadata file is unavailable.
+
+        :raises NoneMetadataError: If the metadata file is available, but does
+            not contain valid metadata.
+        """
+        return self._metadata_cached()
+
+    @property
+    def metadata_dict(self) -> Dict[str, Any]:
+        """PEP 566 compliant JSON-serializable representation of METADATA or PKG-INFO.
+
+        This should return an empty dict if the metadata file is unavailable.
+
+        :raises NoneMetadataError: If the metadata file is available, but does
+            not contain valid metadata.
+        """
+        return msg_to_json(self.metadata)
+
+    @property
+    def metadata_version(self) -> Optional[str]:
+        """Value of "Metadata-Version:" in distribution metadata, if available."""
+        return self.metadata.get("Metadata-Version")
+
+    @property
+    def raw_name(self) -> str:
+        """Value of "Name:" in distribution metadata."""
+        # The metadata should NEVER be missing the Name: key, but if it somehow
+        # does, fall back to the known canonical name.
+        return self.metadata.get("Name", self.canonical_name)
+
+    @property
+    def requires_python(self) -> SpecifierSet:
+        """Value of "Requires-Python:" in distribution metadata.
+
+        If the key does not exist or contains an invalid value, an empty
+        SpecifierSet should be returned.
+        """
+        value = self.metadata.get("Requires-Python")
+        if value is None:
+            return SpecifierSet()
+        try:
+            # Convert to str to satisfy the type checker; this can be a Header object.
+            spec = SpecifierSet(str(value))
+        except InvalidSpecifier as e:
+            message = "Package %r has an invalid Requires-Python: %s"
+            logger.warning(message, self.raw_name, e)
+            return SpecifierSet()
+        return spec
+
+    def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
+        """Dependencies of this distribution.
+
+        For modern .dist-info distributions, this is the collection of
+        "Requires-Dist:" entries in distribution metadata.
+        """
+        raise NotImplementedError()
+
+    def iter_provided_extras(self) -> Iterable[str]:
+        """Extras provided by this distribution.
+
+        For modern .dist-info distributions, this is the collection of
+        "Provides-Extra:" entries in distribution metadata.
+        """
+        raise NotImplementedError()
+
+    def _iter_declared_entries_from_record(self) -> Optional[Iterator[str]]:
+        try:
+            text = self.read_text("RECORD")
+        except FileNotFoundError:
+            return None
+        # This extra Path-str cast normalizes entries.
+        return (str(pathlib.Path(row[0])) for row in csv.reader(text.splitlines()))
+
+    def _iter_declared_entries_from_legacy(self) -> Optional[Iterator[str]]:
+        try:
+            text = self.read_text("installed-files.txt")
+        except FileNotFoundError:
+            return None
+        paths = (p for p in text.splitlines(keepends=False) if p)
+        root = self.location
+        info = self.info_location
+        if root is None or info is None:
+            return paths
+        try:
+            info_rel = pathlib.Path(info).relative_to(root)
+        except ValueError:  # info is not relative to root.
+            return paths
+        if not info_rel.parts:  # info *is* root.
+            return paths
+        return (
+            _convert_installed_files_path(pathlib.Path(p).parts, info_rel.parts)
+            for p in paths
+        )
+
+    def iter_declared_entries(self) -> Optional[Iterator[str]]:
+        """Iterate through file entries declared in this distribution.
+
+        For modern .dist-info distributions, this is the files listed in the
+        ``RECORD`` metadata file. For legacy setuptools distributions, this
+        comes from ``installed-files.txt``, with entries normalized to be
+        compatible with the format used by ``RECORD``.
+
+        :return: An iterator for listed entries, or None if the distribution
+            contains neither ``RECORD`` nor ``installed-files.txt``.
+        """
+        return (
+            self._iter_declared_entries_from_record()
+            or self._iter_declared_entries_from_legacy()
+        )
+
+    def _iter_requires_txt_entries(self) -> Iterator[RequiresEntry]:
+        """Parse a ``requires.txt`` in an egg-info directory.
+
+        This is an INI-ish format where an egg-info stores dependencies. A
+        section header names an extra and, optionally, an environment marker
+        (``[extra:marker]``), while each entry is an arbitrary string (not a
+        key-value pair) representing a dependency as a requirement string (no
+        markers).
+
+        There is a construct in ``importlib.metadata`` called ``Sectioned`` that
+        does mostly the same, but the format is currently considered private.
+        """
+        try:
+            content = self.read_text("requires.txt")
+        except FileNotFoundError:
+            return
+        extra = marker = ""  # Section-less entries don't have markers.
+        for line in content.splitlines():
+            line = line.strip()
+            if not line or line.startswith("#"):  # Comment; ignored.
+                continue
+            if line.startswith("[") and line.endswith("]"):  # A section header.
+                extra, _, marker = line.strip("[]").partition(":")
+                continue
+            yield RequiresEntry(requirement=line, extra=extra, marker=marker)
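+
+    # Illustrative requires.txt content (hypothetical) and the entries the
+    # parser above would yield for it:
+    #
+    #   base-dep
+    #   [socks]
+    #   PySocks
+    #   [tests:python_version < "3.8"]
+    #   mock
+    #
+    # -> RequiresEntry("base-dep", extra="", marker="")
+    # -> RequiresEntry("PySocks", extra="socks", marker="")
+    # -> RequiresEntry("mock", extra="tests", marker='python_version < "3.8"')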
+
+    def _iter_egg_info_extras(self) -> Iterable[str]:
+        """Get extras from the egg-info directory."""
+        known_extras = {""}
+        for entry in self._iter_requires_txt_entries():
+            if entry.extra in known_extras:
+                continue
+            known_extras.add(entry.extra)
+            yield entry.extra
+
+    def _iter_egg_info_dependencies(self) -> Iterable[str]:
+        """Get distribution dependencies from the egg-info directory.
+
+        To ease parsing, this converts a legacy dependency entry into a PEP 508
+        requirement string. As with ``_iter_requires_txt_entries()``, there is
+        code in ``importlib.metadata`` that does mostly the same thing, but it
+        does not do exactly what we need.
+
+        Namely, ``importlib.metadata`` does not normalize the extra name before
+        putting it into the requirement string, which causes marker comparison
+        to fail because the dist-info format does normalize. This is consistent
+        across all currently available PEP 517 backends, although not standardized.
+        """
+        for entry in self._iter_requires_txt_entries():
+            if entry.extra and entry.marker:
+                marker = f'({entry.marker}) and extra == "{safe_extra(entry.extra)}"'
+            elif entry.extra:
+                marker = f'extra == "{safe_extra(entry.extra)}"'
+            elif entry.marker:
+                marker = entry.marker
+            else:
+                marker = ""
+            if marker:
+                yield f"{entry.requirement} ; {marker}"
+            else:
+                yield entry.requirement
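+
+    # Continuing the illustrative requires.txt above, the PEP 508 strings
+    # produced would be:
+    #
+    #   base-dep
+    #   PySocks ; extra == "socks"
+    #   mock ; (python_version < "3.8") and extra == "tests"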
+
+    def _add_egg_info_requires(self, metadata: email.message.Message) -> None:
+        """Add egg-info requires.txt information to the metadata."""
+        if not metadata.get_all("Requires-Dist"):
+            for dep in self._iter_egg_info_dependencies():
+                metadata["Requires-Dist"] = dep
+        if not metadata.get_all("Provides-Extra"):
+            for extra in self._iter_egg_info_extras():
+                metadata["Provides-Extra"] = extra
+
+
+class BaseEnvironment:
+    """An environment containing distributions to introspect."""
+
+    @classmethod
+    def default(cls) -> "BaseEnvironment":
+        raise NotImplementedError()
+
+    @classmethod
+    def from_paths(cls, paths: Optional[List[str]]) -> "BaseEnvironment":
+        raise NotImplementedError()
+
+    def get_distribution(self, name: str) -> Optional["BaseDistribution"]:
+        """Given a requirement name, return the installed distributions.
+
+        The name may not be normalized. The implementation must canonicalize
+        it for lookup.
+        """
+        raise NotImplementedError()
+
+    def _iter_distributions(self) -> Iterator["BaseDistribution"]:
+        """Iterate through installed distributions.
+
+        This function should be implemented by subclasses, but never called
+        directly. Use the public ``iter_all_distributions()`` instead, which
+        implements additional logic to make sure the distributions are valid.
+        """
+        raise NotImplementedError()
+
+    def iter_all_distributions(self) -> Iterator[BaseDistribution]:
+        """Iterate through all installed distributions without any filtering."""
+        for dist in self._iter_distributions():
+            # Make sure the distribution actually comes from a valid Python
+            # packaging distribution. Pip's AdjacentTempDirectory leaves folders
+            # e.g. ``~atplotlib.dist-info`` if cleanup was interrupted. The
+            # valid project name pattern is taken from PEP 508.
+            project_name_valid = re.match(
+                r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$",
+                dist.canonical_name,
+                flags=re.IGNORECASE,
+            )
+            if not project_name_valid:
+                logger.warning(
+                    "Ignoring invalid distribution %s (%s)",
+                    dist.canonical_name,
+                    dist.location,
+                )
+                continue
+            yield dist
+
+    def iter_installed_distributions(
+        self,
+        local_only: bool = True,
+        skip: Container[str] = stdlib_pkgs,
+        include_editables: bool = True,
+        editables_only: bool = False,
+        user_only: bool = False,
+    ) -> Iterator[BaseDistribution]:
+        """Return a list of installed distributions.
+
+        This is based on ``iter_all_distributions()`` with additional filtering
+        options. Note that ``iter_installed_distributions()`` without arguments
+        is *not* equal to ``iter_all_distributions()``, since some of the
+        configurations exclude packages by default.
+
+        :param local_only: If True (default), only return installations
+            local to the current virtualenv, if in a virtualenv.
+        :param skip: An iterable of canonicalized project names to ignore;
+            defaults to ``stdlib_pkgs``.
+        :param include_editables: If False, don't report editables.
+        :param editables_only: If True, only report editables.
+        :param user_only: If True, only report installations in the user
+            site directory.
+        """
+        it = self.iter_all_distributions()
+        if local_only:
+            it = (d for d in it if d.local)
+        if not include_editables:
+            it = (d for d in it if not d.editable)
+        if editables_only:
+            it = (d for d in it if d.editable)
+        if user_only:
+            it = (d for d in it if d.in_usersite)
+        return (d for d in it if d.canonical_name not in skip)
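+
+    # Illustrative call (not part of pip): list local, editable installs
+    # using the default environment exposed by this package's __init__:
+    #
+    #   from pip._internal.metadata import get_default_environment
+    #   env = get_default_environment()
+    #   for dist in env.iter_installed_distributions(editables_only=True):
+    #       print(dist.canonical_name, dist.version)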
+
+
+class Wheel(Protocol):
+    location: str
+
+    def as_zipfile(self) -> zipfile.ZipFile:
+        raise NotImplementedError()
+
+
+class FilesystemWheel(Wheel):
+    def __init__(self, location: str) -> None:
+        self.location = location
+
+    def as_zipfile(self) -> zipfile.ZipFile:
+        return zipfile.ZipFile(self.location, allowZip64=True)
+
+
+class MemoryWheel(Wheel):
+    def __init__(self, location: str, stream: IO[bytes]) -> None:
+        self.location = location
+        self.stream = stream
+
+    def as_zipfile(self) -> zipfile.ZipFile:
+        return zipfile.ZipFile(self.stream, allowZip64=True)
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/metadata/importlib/__init__.py b/venv/lib/python3.9/site-packages/pip/_internal/metadata/importlib/__init__.py
new file mode 100644
index 0000000..5e7af9f
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/metadata/importlib/__init__.py
@@ -0,0 +1,4 @@
+from ._dists import Distribution
+from ._envs import Environment
+
+__all__ = ["Distribution", "Environment"]
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/metadata/importlib/__pycache__/__init__.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/metadata/importlib/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..1cd2dd2
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/metadata/importlib/__pycache__/__init__.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/metadata/importlib/__pycache__/_compat.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/metadata/importlib/__pycache__/_compat.cpython-39.pyc
new file mode 100644
index 0000000..7133b2d
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/metadata/importlib/__pycache__/_compat.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/metadata/importlib/__pycache__/_dists.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/metadata/importlib/__pycache__/_dists.cpython-39.pyc
new file mode 100644
index 0000000..4f409d8
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/metadata/importlib/__pycache__/_dists.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/metadata/importlib/__pycache__/_envs.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/metadata/importlib/__pycache__/_envs.cpython-39.pyc
new file mode 100644
index 0000000..be8bc21
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/metadata/importlib/__pycache__/_envs.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/metadata/importlib/_compat.py b/venv/lib/python3.9/site-packages/pip/_internal/metadata/importlib/_compat.py
new file mode 100644
index 0000000..593bff2
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/metadata/importlib/_compat.py
@@ -0,0 +1,55 @@
+import importlib.metadata
+from typing import Any, Optional, Protocol, cast
+
+
+class BadMetadata(ValueError):
+    def __init__(self, dist: importlib.metadata.Distribution, *, reason: str) -> None:
+        self.dist = dist
+        self.reason = reason
+
+    def __str__(self) -> str:
+        return f"Bad metadata in {self.dist} ({self.reason})"
+
+
+class BasePath(Protocol):
+    """A protocol that various path objects conform.
+
+    This exists because importlib.metadata uses both ``pathlib.Path`` and
+    ``zipfile.Path``, and we need a common base for type hints (Union does not
+    work well since ``zipfile.Path`` is too new for our linter setup).
+
+    This is not meant to be exhaustive; it only contains things that are present
+    in both classes *that we need*.
+    """
+
+    @property
+    def name(self) -> str:
+        raise NotImplementedError()
+
+    @property
+    def parent(self) -> "BasePath":
+        raise NotImplementedError()
+
+
+def get_info_location(d: importlib.metadata.Distribution) -> Optional[BasePath]:
+    """Find the path to the distribution's metadata directory.
+
+    HACK: This relies on importlib.metadata's private ``_path`` attribute. Not
+    all distributions exist on disk, so importlib.metadata is correct to not
+    expose the attribute as public. But pip's code base is old and not as clean,
+    so we do this to avoid having to rewrite too many things. Hopefully we can
+    eliminate this some day.
+    """
+    return getattr(d, "_path", None)
+
+
+def get_dist_name(dist: importlib.metadata.Distribution) -> str:
+    """Get the distribution's project name.
+
+    The ``name`` attribute is only available in Python 3.10 or later. We are
+    targeting exactly that, but Mypy does not know this.
+    """
+    name = cast(Any, dist).name
+    if not isinstance(name, str):
+        raise BadMetadata(dist, reason="invalid metadata entry 'name'")
+    return name
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/metadata/importlib/_dists.py b/venv/lib/python3.9/site-packages/pip/_internal/metadata/importlib/_dists.py
new file mode 100644
index 0000000..65c043c
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/metadata/importlib/_dists.py
@@ -0,0 +1,224 @@
+import email.message
+import importlib.metadata
+import os
+import pathlib
+import zipfile
+from typing import (
+    Collection,
+    Dict,
+    Iterable,
+    Iterator,
+    Mapping,
+    Optional,
+    Sequence,
+    cast,
+)
+
+from pip._vendor.packaging.requirements import Requirement
+from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
+from pip._vendor.packaging.version import parse as parse_version
+
+from pip._internal.exceptions import InvalidWheel, UnsupportedWheel
+from pip._internal.metadata.base import (
+    BaseDistribution,
+    BaseEntryPoint,
+    DistributionVersion,
+    InfoPath,
+    Wheel,
+)
+from pip._internal.utils.misc import normalize_path
+from pip._internal.utils.packaging import safe_extra
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.wheel import parse_wheel, read_wheel_metadata_file
+
+from ._compat import BasePath, get_dist_name
+
+
+class WheelDistribution(importlib.metadata.Distribution):
+    """An ``importlib.metadata.Distribution`` read from a wheel.
+
+    Although ``importlib.metadata.PathDistribution`` accepts ``zipfile.Path``,
+    its implementation is too "lazy" for pip's needs (we can't keep the ZipFile
+    handle open for the entire lifetime of the distribution object).
+
+    This implementation eagerly reads the entire metadata directory into
+    memory instead, and operates from that.
+    """
+
+    def __init__(
+        self,
+        files: Mapping[pathlib.PurePosixPath, bytes],
+        info_location: pathlib.PurePosixPath,
+    ) -> None:
+        self._files = files
+        self.info_location = info_location
+
+    @classmethod
+    def from_zipfile(
+        cls,
+        zf: zipfile.ZipFile,
+        name: str,
+        location: str,
+    ) -> "WheelDistribution":
+        info_dir, _ = parse_wheel(zf, name)
+        paths = (
+            (name, pathlib.PurePosixPath(name.split("/", 1)[-1]))
+            for name in zf.namelist()
+            if name.startswith(f"{info_dir}/")
+        )
+        files = {
+            relpath: read_wheel_metadata_file(zf, fullpath)
+            for fullpath, relpath in paths
+        }
+        info_location = pathlib.PurePosixPath(location, info_dir)
+        return cls(files, info_location)
+
+    def iterdir(self, path: InfoPath) -> Iterator[pathlib.PurePosixPath]:
+        # Only allow iterating through the metadata directory.
+        if pathlib.PurePosixPath(str(path)) in self._files:
+            return iter(self._files)
+        raise FileNotFoundError(path)
+
+    def read_text(self, filename: str) -> Optional[str]:
+        try:
+            data = self._files[pathlib.PurePosixPath(filename)]
+        except KeyError:
+            return None
+        try:
+            text = data.decode("utf-8")
+        except UnicodeDecodeError as e:
+            wheel = self.info_location.parent
+            error = f"Error decoding metadata for {wheel}: {e} in {filename} file"
+            raise UnsupportedWheel(error)
+        return text
+
+
+class Distribution(BaseDistribution):
+    def __init__(
+        self,
+        dist: importlib.metadata.Distribution,
+        info_location: Optional[BasePath],
+        installed_location: Optional[BasePath],
+    ) -> None:
+        self._dist = dist
+        self._info_location = info_location
+        self._installed_location = installed_location
+
+    @classmethod
+    def from_directory(cls, directory: str) -> BaseDistribution:
+        info_location = pathlib.Path(directory)
+        dist = importlib.metadata.Distribution.at(info_location)
+        return cls(dist, info_location, info_location.parent)
+
+    @classmethod
+    def from_metadata_file_contents(
+        cls,
+        metadata_contents: bytes,
+        filename: str,
+        project_name: str,
+    ) -> BaseDistribution:
+        # Generate temp dir to contain the metadata file, and write the file contents.
+        temp_dir = pathlib.Path(
+            TempDirectory(kind="metadata", globally_managed=True).path
+        )
+        metadata_path = temp_dir / "METADATA"
+        metadata_path.write_bytes(metadata_contents)
+        # Construct dist pointing to the newly created directory.
+        dist = importlib.metadata.Distribution.at(metadata_path.parent)
+        return cls(dist, metadata_path.parent, None)
+
+    @classmethod
+    def from_wheel(cls, wheel: Wheel, name: str) -> BaseDistribution:
+        try:
+            with wheel.as_zipfile() as zf:
+                dist = WheelDistribution.from_zipfile(zf, name, wheel.location)
+        except zipfile.BadZipFile as e:
+            raise InvalidWheel(wheel.location, name) from e
+        except UnsupportedWheel as e:
+            raise UnsupportedWheel(f"{name} has an invalid wheel, {e}")
+        return cls(dist, dist.info_location, pathlib.PurePosixPath(wheel.location))
+
+    @property
+    def location(self) -> Optional[str]:
+        if self._info_location is None:
+            return None
+        return str(self._info_location.parent)
+
+    @property
+    def info_location(self) -> Optional[str]:
+        if self._info_location is None:
+            return None
+        return str(self._info_location)
+
+    @property
+    def installed_location(self) -> Optional[str]:
+        if self._installed_location is None:
+            return None
+        return normalize_path(str(self._installed_location))
+
+    def _get_dist_name_from_location(self) -> Optional[str]:
+        """Try to get the name from the metadata directory name.
+
+        This is much faster than reading metadata.
+        """
+        if self._info_location is None:
+            return None
+        stem, suffix = os.path.splitext(self._info_location.name)
+        if suffix not in (".dist-info", ".egg-info"):
+            return None
+        return stem.split("-", 1)[0]
+
+    @property
+    def canonical_name(self) -> NormalizedName:
+        name = self._get_dist_name_from_location() or get_dist_name(self._dist)
+        return canonicalize_name(name)
+
+    @property
+    def version(self) -> DistributionVersion:
+        return parse_version(self._dist.version)
+
+    def is_file(self, path: InfoPath) -> bool:
+        return self._dist.read_text(str(path)) is not None
+
+    def iter_distutils_script_names(self) -> Iterator[str]:
+        # A distutils installation is always "flat" (not in e.g. egg form), so
+        # if this distribution's info location is NOT a pathlib.Path (but e.g.
+        # zipfile.Path), it can never contain any distutils scripts.
+        if not isinstance(self._info_location, pathlib.Path):
+            return
+        for child in self._info_location.joinpath("scripts").iterdir():
+            yield child.name
+
+    def read_text(self, path: InfoPath) -> str:
+        content = self._dist.read_text(str(path))
+        if content is None:
+            raise FileNotFoundError(path)
+        return content
+
+    def iter_entry_points(self) -> Iterable[BaseEntryPoint]:
+        # importlib.metadata's EntryPoint structure satisfies BaseEntryPoint.
+        return self._dist.entry_points
+
+    def _metadata_impl(self) -> email.message.Message:
+        # From Python 3.10+, importlib.metadata declares PackageMetadata as the
+        # return type. This protocol is unfortunately a disaster now and misses
+        # a ton of fields that we need, including get() and get_payload(). We
+        # rely on the implementation that the object is actually a Message now,
+        # until upstream can improve the protocol. (python/cpython#94952)
+        return cast(email.message.Message, self._dist.metadata)
+
+    def iter_provided_extras(self) -> Iterable[str]:
+        return (
+            safe_extra(extra) for extra in self.metadata.get_all("Provides-Extra", [])
+        )
+
+    def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
+        contexts: Sequence[Dict[str, str]] = [{"extra": safe_extra(e)} for e in extras]
+        for req_string in self.metadata.get_all("Requires-Dist", []):
+            req = Requirement(req_string)
+            if not req.marker:
+                yield req
+            elif not extras and req.marker.evaluate({"extra": ""}):
+                yield req
+            elif any(req.marker.evaluate(context) for context in contexts):
+                yield req
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/metadata/importlib/_envs.py b/venv/lib/python3.9/site-packages/pip/_internal/metadata/importlib/_envs.py
new file mode 100644
index 0000000..cbec59e
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/metadata/importlib/_envs.py
@@ -0,0 +1,188 @@
+import functools
+import importlib.metadata
+import logging
+import os
+import pathlib
+import sys
+import zipfile
+import zipimport
+from typing import Iterator, List, Optional, Sequence, Set, Tuple
+
+from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
+
+from pip._internal.metadata.base import BaseDistribution, BaseEnvironment
+from pip._internal.models.wheel import Wheel
+from pip._internal.utils.deprecation import deprecated
+from pip._internal.utils.filetypes import WHEEL_EXTENSION
+
+from ._compat import BadMetadata, BasePath, get_dist_name, get_info_location
+from ._dists import Distribution
+
+logger = logging.getLogger(__name__)
+
+
+def _looks_like_wheel(location: str) -> bool:
+    if not location.endswith(WHEEL_EXTENSION):
+        return False
+    if not os.path.isfile(location):
+        return False
+    if not Wheel.wheel_file_re.match(os.path.basename(location)):
+        return False
+    return zipfile.is_zipfile(location)
+
+
+class _DistributionFinder:
+    """Finder to locate distributions.
+
+    The main purpose of this class is to memoize found distributions' names, so
+    only one distribution is returned for each package name. A lot of pip code
+    assumes this (because it is setuptools's behavior), and not doing the same
+    can potentially cause a distribution in a lower precedence path to override a
+    higher precedence one if the caller is not careful.
+
+    Eventually we probably want to make it possible to see lower precedence
+    installations as well. It's a useful feature, after all.
+    """
+
+    FoundResult = Tuple[importlib.metadata.Distribution, Optional[BasePath]]
+
+    def __init__(self) -> None:
+        self._found_names: Set[NormalizedName] = set()
+
+    def _find_impl(self, location: str) -> Iterator[FoundResult]:
+        """Find distributions in a location."""
+        # Skip looking inside a wheel. Since a package inside a wheel is not
+        # always valid (due to .data directories etc.), its .dist-info entry
+        # should not be considered an installed distribution.
+        if _looks_like_wheel(location):
+            return
+        # To know exactly where we find a distribution, we have to feed in the
+        # paths one by one, instead of dumping the list to importlib.metadata.
+        for dist in importlib.metadata.distributions(path=[location]):
+            info_location = get_info_location(dist)
+            try:
+                raw_name = get_dist_name(dist)
+            except BadMetadata as e:
+                logger.warning("Skipping %s due to %s", info_location, e.reason)
+                continue
+            normalized_name = canonicalize_name(raw_name)
+            if normalized_name in self._found_names:
+                continue
+            self._found_names.add(normalized_name)
+            yield dist, info_location
+
+    def find(self, location: str) -> Iterator[BaseDistribution]:
+        """Find distributions in a location.
+
+        The path can be either a directory, or a ZIP archive.
+        """
+        for dist, info_location in self._find_impl(location):
+            if info_location is None:
+                installed_location: Optional[BasePath] = None
+            else:
+                installed_location = info_location.parent
+            yield Distribution(dist, info_location, installed_location)
+
+    def find_linked(self, location: str) -> Iterator[BaseDistribution]:
+        """Read location in egg-link files and return distributions in there.
+
+        The path should be a directory; otherwise this returns nothing. This
+        follows how setuptools does it, for compatibility. The first non-empty
+        line in the egg-link is read as a path (resolved against the egg-link's
+        containing directory if relative). Distributions found at that linked
+        location are returned.
+        """
+        path = pathlib.Path(location)
+        if not path.is_dir():
+            return
+        for child in path.iterdir():
+            if child.suffix != ".egg-link":
+                continue
+            with child.open() as f:
+                lines = (line.strip() for line in f)
+                target_rel = next((line for line in lines if line), "")
+            if not target_rel:
+                continue
+            target_location = str(path.joinpath(target_rel))
+            for dist, info_location in self._find_impl(target_location):
+                yield Distribution(dist, info_location, path)
+
+    def _find_eggs_in_dir(self, location: str) -> Iterator[BaseDistribution]:
+        from pip._vendor.pkg_resources import find_distributions
+
+        from pip._internal.metadata import pkg_resources as legacy
+
+        with os.scandir(location) as it:
+            for entry in it:
+                if not entry.name.endswith(".egg"):
+                    continue
+                for dist in find_distributions(entry.path):
+                    yield legacy.Distribution(dist)
+
+    def _find_eggs_in_zip(self, location: str) -> Iterator[BaseDistribution]:
+        from pip._vendor.pkg_resources import find_eggs_in_zip
+
+        from pip._internal.metadata import pkg_resources as legacy
+
+        try:
+            importer = zipimport.zipimporter(location)
+        except zipimport.ZipImportError:
+            return
+        for dist in find_eggs_in_zip(importer, location):
+            yield legacy.Distribution(dist)
+
+    def find_eggs(self, location: str) -> Iterator[BaseDistribution]:
+        """Find eggs in a location.
+
+        This actually uses the old *pkg_resources* backend. We likely want to
+        deprecate this so we can eventually remove the *pkg_resources*
+        dependency entirely. Before that, this should first emit a deprecation
+        warning for some versions when using the fallback, since importing
+        *pkg_resources* is slow for those who don't need it.
+        """
+        if os.path.isdir(location):
+            yield from self._find_eggs_in_dir(location)
+        if zipfile.is_zipfile(location):
+            yield from self._find_eggs_in_zip(location)
+
+
+@functools.lru_cache(maxsize=None)  # Warn about a distribution exactly once.
+def _emit_egg_deprecation(location: Optional[str]) -> None:
+    deprecated(
+        reason=f"Loading egg at {location} is deprecated.",
+        replacement="to use pip for package installation.",
+        gone_in=None,
+    )
+
+
+class Environment(BaseEnvironment):
+    def __init__(self, paths: Sequence[str]) -> None:
+        self._paths = paths
+
+    @classmethod
+    def default(cls) -> BaseEnvironment:
+        return cls(sys.path)
+
+    @classmethod
+    def from_paths(cls, paths: Optional[List[str]]) -> BaseEnvironment:
+        if paths is None:
+            return cls(sys.path)
+        return cls(paths)
+
+    def _iter_distributions(self) -> Iterator[BaseDistribution]:
+        finder = _DistributionFinder()
+        for location in self._paths:
+            yield from finder.find(location)
+            for dist in finder.find_eggs(location):
+                # _emit_egg_deprecation(dist.location)  # TODO: Enable this.
+                yield dist
+            # This must go last because that's how pkg_resources tie-breaks.
+            yield from finder.find_linked(location)
+
+    def get_distribution(self, name: str) -> Optional[BaseDistribution]:
+        matches = (
+            distribution
+            for distribution in self.iter_all_distributions()
+            if distribution.canonical_name == canonicalize_name(name)
+        )
+        return next(matches, None)
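
Note: a minimal sketch of querying the importlib-backed environment added
above; it assumes this vendored pip (~22.3) is importable and that the module
path matches upstream pip (pip._internal.metadata.importlib._envs):

    # Hypothetical usage; the module path is an assumption based on upstream pip.
    from pip._internal.metadata.importlib._envs import Environment

    env = Environment.default()  # scans sys.path
    dist = env.get_distribution("pip")  # name is canonicalized before matching
    if dist is not None:
        print(dist.canonical_name, dist.version)
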
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/metadata/pkg_resources.py b/venv/lib/python3.9/site-packages/pip/_internal/metadata/pkg_resources.py
new file mode 100644
index 0000000..f330ef1
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/metadata/pkg_resources.py
@@ -0,0 +1,270 @@
+import email.message
+import email.parser
+import logging
+import os
+import zipfile
+from typing import Collection, Iterable, Iterator, List, Mapping, NamedTuple, Optional
+
+from pip._vendor import pkg_resources
+from pip._vendor.packaging.requirements import Requirement
+from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
+from pip._vendor.packaging.version import parse as parse_version
+
+from pip._internal.exceptions import InvalidWheel, NoneMetadataError, UnsupportedWheel
+from pip._internal.utils.egg_link import egg_link_path_from_location
+from pip._internal.utils.misc import display_path, normalize_path
+from pip._internal.utils.wheel import parse_wheel, read_wheel_metadata_file
+
+from .base import (
+    BaseDistribution,
+    BaseEntryPoint,
+    BaseEnvironment,
+    DistributionVersion,
+    InfoPath,
+    Wheel,
+)
+
+logger = logging.getLogger(__name__)
+
+
+class EntryPoint(NamedTuple):
+    name: str
+    value: str
+    group: str
+
+
+class InMemoryMetadata:
+    """IMetadataProvider that reads metadata files from a dictionary.
+
+    This also maps metadata decoding exceptions to our internal exception type.
+    """
+
+    def __init__(self, metadata: Mapping[str, bytes], wheel_name: str) -> None:
+        self._metadata = metadata
+        self._wheel_name = wheel_name
+
+    def has_metadata(self, name: str) -> bool:
+        return name in self._metadata
+
+    def get_metadata(self, name: str) -> str:
+        try:
+            return self._metadata[name].decode()
+        except UnicodeDecodeError as e:
+            # Augment the default error with the origin of the file.
+            raise UnsupportedWheel(
+                f"Error decoding metadata for {self._wheel_name}: {e} in {name} file"
+            )
+
+    def get_metadata_lines(self, name: str) -> Iterable[str]:
+        return pkg_resources.yield_lines(self.get_metadata(name))
+
+    def metadata_isdir(self, name: str) -> bool:
+        return False
+
+    def metadata_listdir(self, name: str) -> List[str]:
+        return []
+
+    def run_script(self, script_name: str, namespace: str) -> None:
+        pass
+
+
+class Distribution(BaseDistribution):
+    def __init__(self, dist: pkg_resources.Distribution) -> None:
+        self._dist = dist
+
+    @classmethod
+    def from_directory(cls, directory: str) -> BaseDistribution:
+        dist_dir = directory.rstrip(os.sep)
+
+        # Build a PathMetadata object from the path to the metadata. :wink:
+        base_dir, dist_dir_name = os.path.split(dist_dir)
+        metadata = pkg_resources.PathMetadata(base_dir, dist_dir)
+
+        # Determine the correct Distribution object type.
+        if dist_dir.endswith(".egg-info"):
+            dist_cls = pkg_resources.Distribution
+            dist_name = os.path.splitext(dist_dir_name)[0]
+        else:
+            assert dist_dir.endswith(".dist-info")
+            dist_cls = pkg_resources.DistInfoDistribution
+            dist_name = os.path.splitext(dist_dir_name)[0].split("-")[0]
+
+        dist = dist_cls(base_dir, project_name=dist_name, metadata=metadata)
+        return cls(dist)
+
+    @classmethod
+    def from_metadata_file_contents(
+        cls,
+        metadata_contents: bytes,
+        filename: str,
+        project_name: str,
+    ) -> BaseDistribution:
+        metadata_dict = {
+            "METADATA": metadata_contents,
+        }
+        dist = pkg_resources.DistInfoDistribution(
+            location=filename,
+            metadata=InMemoryMetadata(metadata_dict, filename),
+            project_name=project_name,
+        )
+        return cls(dist)
+
+    @classmethod
+    def from_wheel(cls, wheel: Wheel, name: str) -> BaseDistribution:
+        try:
+            with wheel.as_zipfile() as zf:
+                info_dir, _ = parse_wheel(zf, name)
+                metadata_dict = {
+                    path.split("/", 1)[-1]: read_wheel_metadata_file(zf, path)
+                    for path in zf.namelist()
+                    if path.startswith(f"{info_dir}/")
+                }
+        except zipfile.BadZipFile as e:
+            raise InvalidWheel(wheel.location, name) from e
+        except UnsupportedWheel as e:
+            raise UnsupportedWheel(f"{name} has an invalid wheel, {e}")
+        dist = pkg_resources.DistInfoDistribution(
+            location=wheel.location,
+            metadata=InMemoryMetadata(metadata_dict, wheel.location),
+            project_name=name,
+        )
+        return cls(dist)
+
+    @property
+    def location(self) -> Optional[str]:
+        return self._dist.location
+
+    @property
+    def installed_location(self) -> Optional[str]:
+        egg_link = egg_link_path_from_location(self.raw_name)
+        if egg_link:
+            location = egg_link
+        elif self.location:
+            location = self.location
+        else:
+            return None
+        return normalize_path(location)
+
+    @property
+    def info_location(self) -> Optional[str]:
+        return self._dist.egg_info
+
+    @property
+    def installed_by_distutils(self) -> bool:
+        # A distutils-installed distribution is provided by FileMetadata. This
+        # provider has a "path" attribute not present anywhere else. Not the
+        # best introspection logic, but pip has been doing this for a long time.
+        try:
+            return bool(self._dist._provider.path)
+        except AttributeError:
+            return False
+
+    @property
+    def canonical_name(self) -> NormalizedName:
+        return canonicalize_name(self._dist.project_name)
+
+    @property
+    def version(self) -> DistributionVersion:
+        return parse_version(self._dist.version)
+
+    def is_file(self, path: InfoPath) -> bool:
+        return self._dist.has_metadata(str(path))
+
+    def iter_distutils_script_names(self) -> Iterator[str]:
+        yield from self._dist.metadata_listdir("scripts")
+
+    def read_text(self, path: InfoPath) -> str:
+        name = str(path)
+        if not self._dist.has_metadata(name):
+            raise FileNotFoundError(name)
+        content = self._dist.get_metadata(name)
+        if content is None:
+            raise NoneMetadataError(self, name)
+        return content
+
+    def iter_entry_points(self) -> Iterable[BaseEntryPoint]:
+        for group, entries in self._dist.get_entry_map().items():
+            for name, entry_point in entries.items():
+                name, _, value = str(entry_point).partition("=")
+                yield EntryPoint(name=name.strip(), value=value.strip(), group=group)
+
+    def _metadata_impl(self) -> email.message.Message:
+        """
+        :raises NoneMetadataError: if the distribution reports `has_metadata()`
+            True but `get_metadata()` returns None.
+        """
+        if isinstance(self._dist, pkg_resources.DistInfoDistribution):
+            metadata_name = "METADATA"
+        else:
+            metadata_name = "PKG-INFO"
+        try:
+            metadata = self.read_text(metadata_name)
+        except FileNotFoundError:
+            if self.location:
+                displaying_path = display_path(self.location)
+            else:
+                displaying_path = repr(self.location)
+            logger.warning("No metadata found in %s", displaying_path)
+            metadata = ""
+        feed_parser = email.parser.FeedParser()
+        feed_parser.feed(metadata)
+        return feed_parser.close()
+
+    def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
+        if extras:  # pkg_resources raises on invalid extras, so we sanitize.
+            extras = frozenset(extras).intersection(self._dist.extras)
+        return self._dist.requires(extras)
+
+    def iter_provided_extras(self) -> Iterable[str]:
+        return self._dist.extras
+
+
+class Environment(BaseEnvironment):
+    def __init__(self, ws: pkg_resources.WorkingSet) -> None:
+        self._ws = ws
+
+    @classmethod
+    def default(cls) -> BaseEnvironment:
+        return cls(pkg_resources.working_set)
+
+    @classmethod
+    def from_paths(cls, paths: Optional[List[str]]) -> BaseEnvironment:
+        return cls(pkg_resources.WorkingSet(paths))
+
+    def _iter_distributions(self) -> Iterator[BaseDistribution]:
+        for dist in self._ws:
+            yield Distribution(dist)
+
+    def _search_distribution(self, name: str) -> Optional[BaseDistribution]:
+        """Find a distribution matching the ``name`` in the environment.
+
+        This searches from *all* distributions available in the environment, to
+        match the behavior of ``pkg_resources.get_distribution()``.
+        """
+        canonical_name = canonicalize_name(name)
+        for dist in self.iter_all_distributions():
+            if dist.canonical_name == canonical_name:
+                return dist
+        return None
+
+    def get_distribution(self, name: str) -> Optional[BaseDistribution]:
+        # Search the distribution by looking through the working set.
+        dist = self._search_distribution(name)
+        if dist:
+            return dist
+
+        # If distribution could not be found, call working_set.require to
+        # update the working set, and try to find the distribution again.
+        # This might happen, e.g., when you install a package twice: once
+        # using setup.py develop and again using setup.py install. When
+        # running pip uninstall twice, the package is removed from the
+        # working set in the first uninstall, so we have to populate the
+        # working set again so that pip knows about it and the package gets
+        # picked up and successfully uninstalled the second time too.
+        try:
+            # We didn't pass in any version specifiers, so this can never
+            # raise pkg_resources.VersionConflict.
+            self._ws.require(name)
+        except pkg_resources.DistributionNotFound:
+            return None
+        return self._search_distribution(name)
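
Note: the pkg_resources-backed Environment mirrors the importlib one; a small
sketch, assuming this vendored pip is importable under the path shown above:

    from pip._internal.metadata.pkg_resources import Environment

    env = Environment.default()  # wraps pkg_resources.working_set
    dist = env.get_distribution("setuptools")  # may fall back to ws.require()
    if dist is not None:
        print(dist.canonical_name, dist.version, dist.location)
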
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/models/__init__.py b/venv/lib/python3.9/site-packages/pip/_internal/models/__init__.py
new file mode 100644
index 0000000..7855226
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/models/__init__.py
@@ -0,0 +1,2 @@
+"""A package that contains models that represent entities.
+"""
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/__init__.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..768dc3c
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/__init__.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/candidate.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/candidate.cpython-39.pyc
new file mode 100644
index 0000000..261d469
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/candidate.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-39.pyc
new file mode 100644
index 0000000..a92cd84
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/format_control.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/format_control.cpython-39.pyc
new file mode 100644
index 0000000..3faf707
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/format_control.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/index.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/index.cpython-39.pyc
new file mode 100644
index 0000000..9990cdd
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/index.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/installation_report.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/installation_report.cpython-39.pyc
new file mode 100644
index 0000000..44b2517
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/installation_report.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/link.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/link.cpython-39.pyc
new file mode 100644
index 0000000..b8134a7
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/link.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/scheme.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/scheme.cpython-39.pyc
new file mode 100644
index 0000000..e364926
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/scheme.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-39.pyc
new file mode 100644
index 0000000..f758c4d
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-39.pyc
new file mode 100644
index 0000000..0b71282
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/target_python.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/target_python.cpython-39.pyc
new file mode 100644
index 0000000..e206093
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/target_python.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/wheel.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/wheel.cpython-39.pyc
new file mode 100644
index 0000000..0c52664
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/models/__pycache__/wheel.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/models/candidate.py b/venv/lib/python3.9/site-packages/pip/_internal/models/candidate.py
new file mode 100644
index 0000000..a4963ae
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/models/candidate.py
@@ -0,0 +1,34 @@
+from pip._vendor.packaging.version import parse as parse_version
+
+from pip._internal.models.link import Link
+from pip._internal.utils.models import KeyBasedCompareMixin
+
+
+class InstallationCandidate(KeyBasedCompareMixin):
+    """Represents a potential "candidate" for installation."""
+
+    __slots__ = ["name", "version", "link"]
+
+    def __init__(self, name: str, version: str, link: Link) -> None:
+        self.name = name
+        self.version = parse_version(version)
+        self.link = link
+
+        super().__init__(
+            key=(self.name, self.version, self.link),
+            defining_class=InstallationCandidate,
+        )
+
+    def __repr__(self) -> str:
+        return "<InstallationCandidate({!r}, {!r}, {!r})>".format(
+            self.name,
+            self.version,
+            self.link,
+        )
+
+    def __str__(self) -> str:
+        return "{!r} candidate (version {} at {})".format(
+            self.name,
+            self.version,
+            self.link,
+        )
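
Note: InstallationCandidate parses the version string and compares by the
(name, version, link) key via KeyBasedCompareMixin; a sketch with hypothetical
URLs and project name, assuming the vendored modules import as pathed here:

    from pip._internal.models.candidate import InstallationCandidate
    from pip._internal.models.link import Link

    link1 = Link("https://example.com/demo-1.0.tar.gz")
    link2 = Link("https://example.com/demo-1.1.tar.gz")
    c1 = InstallationCandidate("demo", "1.0", link1)
    c2 = InstallationCandidate("demo", "1.1", link2)
    assert c1 < c2  # versions compare as parsed Version objects, not strings
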
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/models/direct_url.py b/venv/lib/python3.9/site-packages/pip/_internal/models/direct_url.py
new file mode 100644
index 0000000..e75feda
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/models/direct_url.py
@@ -0,0 +1,212 @@
+""" PEP 610 """
+import json
+import re
+import urllib.parse
+from typing import Any, Dict, Iterable, Optional, Type, TypeVar, Union
+
+__all__ = [
+    "DirectUrl",
+    "DirectUrlValidationError",
+    "DirInfo",
+    "ArchiveInfo",
+    "VcsInfo",
+]
+
+T = TypeVar("T")
+
+DIRECT_URL_METADATA_NAME = "direct_url.json"
+ENV_VAR_RE = re.compile(r"^\$\{[A-Za-z0-9-_]+\}(:\$\{[A-Za-z0-9-_]+\})?$")
+
+
+class DirectUrlValidationError(Exception):
+    pass
+
+
+def _get(
+    d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None
+) -> Optional[T]:
+    """Get value from dictionary and verify expected type."""
+    if key not in d:
+        return default
+    value = d[key]
+    if not isinstance(value, expected_type):
+        raise DirectUrlValidationError(
+            "{!r} has unexpected type for {} (expected {})".format(
+                value, key, expected_type
+            )
+        )
+    return value
+
+
+def _get_required(
+    d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None
+) -> T:
+    value = _get(d, expected_type, key, default)
+    if value is None:
+        raise DirectUrlValidationError(f"{key} must have a value")
+    return value
+
+
+def _exactly_one_of(infos: Iterable[Optional["InfoType"]]) -> "InfoType":
+    infos = [info for info in infos if info is not None]
+    if not infos:
+        raise DirectUrlValidationError(
+            "missing one of archive_info, dir_info, vcs_info"
+        )
+    if len(infos) > 1:
+        raise DirectUrlValidationError(
+            "more than one of archive_info, dir_info, vcs_info"
+        )
+    assert infos[0] is not None
+    return infos[0]
+
+
+def _filter_none(**kwargs: Any) -> Dict[str, Any]:
+    """Make dict excluding None values."""
+    return {k: v for k, v in kwargs.items() if v is not None}
+
+
+class VcsInfo:
+    name = "vcs_info"
+
+    def __init__(
+        self,
+        vcs: str,
+        commit_id: str,
+        requested_revision: Optional[str] = None,
+    ) -> None:
+        self.vcs = vcs
+        self.requested_revision = requested_revision
+        self.commit_id = commit_id
+
+    @classmethod
+    def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["VcsInfo"]:
+        if d is None:
+            return None
+        return cls(
+            vcs=_get_required(d, str, "vcs"),
+            commit_id=_get_required(d, str, "commit_id"),
+            requested_revision=_get(d, str, "requested_revision"),
+        )
+
+    def _to_dict(self) -> Dict[str, Any]:
+        return _filter_none(
+            vcs=self.vcs,
+            requested_revision=self.requested_revision,
+            commit_id=self.commit_id,
+        )
+
+
+class ArchiveInfo:
+    name = "archive_info"
+
+    def __init__(
+        self,
+        hash: Optional[str] = None,
+    ) -> None:
+        self.hash = hash
+
+    @classmethod
+    def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["ArchiveInfo"]:
+        if d is None:
+            return None
+        return cls(hash=_get(d, str, "hash"))
+
+    def _to_dict(self) -> Dict[str, Any]:
+        return _filter_none(hash=self.hash)
+
+
+class DirInfo:
+    name = "dir_info"
+
+    def __init__(
+        self,
+        editable: bool = False,
+    ) -> None:
+        self.editable = editable
+
+    @classmethod
+    def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["DirInfo"]:
+        if d is None:
+            return None
+        return cls(editable=_get_required(d, bool, "editable", default=False))
+
+    def _to_dict(self) -> Dict[str, Any]:
+        return _filter_none(editable=self.editable or None)
+
+
+InfoType = Union[ArchiveInfo, DirInfo, VcsInfo]
+
+
+class DirectUrl:
+    def __init__(
+        self,
+        url: str,
+        info: InfoType,
+        subdirectory: Optional[str] = None,
+    ) -> None:
+        self.url = url
+        self.info = info
+        self.subdirectory = subdirectory
+
+    def _remove_auth_from_netloc(self, netloc: str) -> str:
+        if "@" not in netloc:
+            return netloc
+        user_pass, netloc_no_user_pass = netloc.split("@", 1)
+        if (
+            isinstance(self.info, VcsInfo)
+            and self.info.vcs == "git"
+            and user_pass == "git"
+        ):
+            return netloc
+        if ENV_VAR_RE.match(user_pass):
+            return netloc
+        return netloc_no_user_pass
+
+    @property
+    def redacted_url(self) -> str:
+        """url with user:password part removed unless it is formed with
+        environment variables as specified in PEP 610, or it is ``git``
+        in the case of a git URL.
+        """
+        purl = urllib.parse.urlsplit(self.url)
+        netloc = self._remove_auth_from_netloc(purl.netloc)
+        surl = urllib.parse.urlunsplit(
+            (purl.scheme, netloc, purl.path, purl.query, purl.fragment)
+        )
+        return surl
+
+    def validate(self) -> None:
+        self.from_dict(self.to_dict())
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> "DirectUrl":
+        return DirectUrl(
+            url=_get_required(d, str, "url"),
+            subdirectory=_get(d, str, "subdirectory"),
+            info=_exactly_one_of(
+                [
+                    ArchiveInfo._from_dict(_get(d, dict, "archive_info")),
+                    DirInfo._from_dict(_get(d, dict, "dir_info")),
+                    VcsInfo._from_dict(_get(d, dict, "vcs_info")),
+                ]
+            ),
+        )
+
+    def to_dict(self) -> Dict[str, Any]:
+        res = _filter_none(
+            url=self.redacted_url,
+            subdirectory=self.subdirectory,
+        )
+        res[self.info.name] = self.info._to_dict()
+        return res
+
+    @classmethod
+    def from_json(cls, s: str) -> "DirectUrl":
+        return cls.from_dict(json.loads(s))
+
+    def to_json(self) -> str:
+        return json.dumps(self.to_dict(), sort_keys=True)
+
+    def is_local_editable(self) -> bool:
+        return isinstance(self.info, DirInfo) and self.info.editable
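
Note: DirectUrl round-trips PEP 610 records and redacts credentials on the way
out; a sketch with a hypothetical URL and hash value:

    from pip._internal.models.direct_url import ArchiveInfo, DirectUrl

    direct = DirectUrl(
        url="https://user:s3cret@example.com/demo-1.0.tar.gz",
        info=ArchiveInfo(hash="sha256=deadbeef"),
    )
    d = direct.to_dict()
    assert d["url"] == "https://example.com/demo-1.0.tar.gz"  # auth removed
    assert DirectUrl.from_dict(d).to_json() == direct.to_json()
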
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/models/format_control.py b/venv/lib/python3.9/site-packages/pip/_internal/models/format_control.py
new file mode 100644
index 0000000..db3995e
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/models/format_control.py
@@ -0,0 +1,80 @@
+from typing import FrozenSet, Optional, Set
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.exceptions import CommandError
+
+
+class FormatControl:
+    """Helper for managing formats from which a package can be installed."""
+
+    __slots__ = ["no_binary", "only_binary"]
+
+    def __init__(
+        self,
+        no_binary: Optional[Set[str]] = None,
+        only_binary: Optional[Set[str]] = None,
+    ) -> None:
+        if no_binary is None:
+            no_binary = set()
+        if only_binary is None:
+            only_binary = set()
+
+        self.no_binary = no_binary
+        self.only_binary = only_binary
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, self.__class__):
+            return NotImplemented
+
+        if self.__slots__ != other.__slots__:
+            return False
+
+        return all(getattr(self, k) == getattr(other, k) for k in self.__slots__)
+
+    def __repr__(self) -> str:
+        return "{}({}, {})".format(
+            self.__class__.__name__, self.no_binary, self.only_binary
+        )
+
+    @staticmethod
+    def handle_mutual_excludes(value: str, target: Set[str], other: Set[str]) -> None:
+        if value.startswith("-"):
+            raise CommandError(
+                "--no-binary / --only-binary option requires 1 argument."
+            )
+        new = value.split(",")
+        while ":all:" in new:
+            other.clear()
+            target.clear()
+            target.add(":all:")
+            del new[: new.index(":all:") + 1]
+            # Without a ":none:", we want to discard everything, as ":all:" covers it
+            if ":none:" not in new:
+                return
+        for name in new:
+            if name == ":none:":
+                target.clear()
+                continue
+            name = canonicalize_name(name)
+            other.discard(name)
+            target.add(name)
+
+    def get_allowed_formats(self, canonical_name: str) -> FrozenSet[str]:
+        result = {"binary", "source"}
+        if canonical_name in self.only_binary:
+            result.discard("source")
+        elif canonical_name in self.no_binary:
+            result.discard("binary")
+        elif ":all:" in self.only_binary:
+            result.discard("source")
+        elif ":all:" in self.no_binary:
+            result.discard("binary")
+        return frozenset(result)
+
+    def disallow_binaries(self) -> None:
+        self.handle_mutual_excludes(
+            ":all:",
+            self.no_binary,
+            self.only_binary,
+        )
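
Note: the ":all:"/":none:" handling is the subtle part of FormatControl; a
quick sketch of --only-binary=:all: semantics using a hypothetical project:

    from pip._internal.models.format_control import FormatControl

    fc = FormatControl(set(), set())
    # ":all:" in one set clears the other, mirroring --only-binary=:all:.
    FormatControl.handle_mutual_excludes(":all:", fc.only_binary, fc.no_binary)
    assert fc.get_allowed_formats("requests") == frozenset({"binary"})
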
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/models/index.py b/venv/lib/python3.9/site-packages/pip/_internal/models/index.py
new file mode 100644
index 0000000..b94c325
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/models/index.py
@@ -0,0 +1,28 @@
+import urllib.parse
+
+
+class PackageIndex:
+    """Represents a Package Index and provides easier access to endpoints"""
+
+    __slots__ = ["url", "netloc", "simple_url", "pypi_url", "file_storage_domain"]
+
+    def __init__(self, url: str, file_storage_domain: str) -> None:
+        super().__init__()
+        self.url = url
+        self.netloc = urllib.parse.urlsplit(url).netloc
+        self.simple_url = self._url_for_path("simple")
+        self.pypi_url = self._url_for_path("pypi")
+
+        # This is part of a temporary hack used to block installs of PyPI
+        # packages that depend on external URLs; it is only necessary until
+        # PyPI can block such packages itself.
+        self.file_storage_domain = file_storage_domain
+
+    def _url_for_path(self, path: str) -> str:
+        return urllib.parse.urljoin(self.url, path)
+
+
+PyPI = PackageIndex("https://pypi.org/", file_storage_domain="files.pythonhosted.org")
+TestPyPI = PackageIndex(
+    "https://test.pypi.org/", file_storage_domain="test-files.pythonhosted.org"
+)
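
Note: _url_for_path delegates to urljoin, so the trailing slash on the base
URL is what makes the endpoints resolve as siblings of the root:

    from pip._internal.models.index import PyPI, TestPyPI

    assert PyPI.simple_url == "https://pypi.org/simple"
    assert PyPI.pypi_url == "https://pypi.org/pypi"
    assert TestPyPI.netloc == "test.pypi.org"
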
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/models/installation_report.py b/venv/lib/python3.9/site-packages/pip/_internal/models/installation_report.py
new file mode 100644
index 0000000..965f095
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/models/installation_report.py
@@ -0,0 +1,53 @@
+from typing import Any, Dict, Sequence
+
+from pip._vendor.packaging.markers import default_environment
+
+from pip import __version__
+from pip._internal.req.req_install import InstallRequirement
+
+
+class InstallationReport:
+    def __init__(self, install_requirements: Sequence[InstallRequirement]):
+        self._install_requirements = install_requirements
+
+    @classmethod
+    def _install_req_to_dict(cls, ireq: InstallRequirement) -> Dict[str, Any]:
+        assert ireq.download_info, f"No download_info for {ireq}"
+        res = {
+            # PEP 610 json for the download URL. download_info.archive_info.hash may
+            # be absent when the requirement was installed from the wheel cache
+            # and the cache entry was populated by an older pip version that did not
+            # record origin.json.
+            "download_info": ireq.download_info.to_dict(),
+            # is_direct is true if the requirement was a direct URL reference (which
+            # includes editable requirements), and false if the requirement was
+            # downloaded from a PEP 503 index or --find-links.
+            "is_direct": bool(ireq.original_link),
+            # requested is true if the requirement was specified by the user (aka
+            # top level requirement), and false if it was installed as a dependency of a
+            # requirement. https://peps.python.org/pep-0376/#requested
+            "requested": ireq.user_supplied,
+            # PEP 566 json encoding for metadata
+            # https://www.python.org/dev/peps/pep-0566/#json-compatible-metadata
+            "metadata": ireq.get_dist().metadata_dict,
+        }
+        if ireq.user_supplied and ireq.extras:
+            # For top level requirements, the list of requested extras, if any.
+            res["requested_extras"] = list(sorted(ireq.extras))
+        return res
+
+    def to_dict(self) -> Dict[str, Any]:
+        return {
+            "version": "0",
+            "pip_version": __version__,
+            "install": [
+                self._install_req_to_dict(ireq) for ireq in self._install_requirements
+            ],
+            # https://peps.python.org/pep-0508/#environment-markers
+            # TODO: currently, the resolver uses the default environment to evaluate
+            # environment markers, so that is what we report here. In the future, it
+            # should also take into account options such as --python-version or
+            # --platform, perhaps under the form of an environment_override field?
+            # https://github.com/pypa/pip/issues/11198
+            "environment": default_environment(),
+        }
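
Note: even with no requirements the report carries its fixed envelope; a
sketch, assuming the vendored pip imports (this pulls in a fair amount of
pip._internal):

    from pip._internal.models.installation_report import InstallationReport

    report = InstallationReport([]).to_dict()
    assert report["version"] == "0" and report["install"] == []
    sorted(report)  # ['environment', 'install', 'pip_version', 'version']
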
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/models/link.py b/venv/lib/python3.9/site-packages/pip/_internal/models/link.py
new file mode 100644
index 0000000..c792d12
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/models/link.py
@@ -0,0 +1,507 @@
+import functools
+import itertools
+import logging
+import os
+import posixpath
+import re
+import urllib.parse
+from dataclasses import dataclass
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Dict,
+    List,
+    Mapping,
+    NamedTuple,
+    Optional,
+    Tuple,
+    Union,
+)
+
+from pip._internal.utils.filetypes import WHEEL_EXTENSION
+from pip._internal.utils.hashes import Hashes
+from pip._internal.utils.misc import (
+    pairwise,
+    redact_auth_from_url,
+    split_auth_from_netloc,
+    splitext,
+)
+from pip._internal.utils.models import KeyBasedCompareMixin
+from pip._internal.utils.urls import path_to_url, url_to_path
+
+if TYPE_CHECKING:
+    from pip._internal.index.collector import IndexContent
+
+logger = logging.getLogger(__name__)
+
+
+# Order matters, earlier hashes have a precedence over later hashes for what
+# we will pick to use.
+_SUPPORTED_HASHES = ("sha512", "sha384", "sha256", "sha224", "sha1", "md5")
+
+
+@dataclass(frozen=True)
+class LinkHash:
+    """Links to content may have embedded hash values. This class parses those.
+
+    `name` must be any member of `_SUPPORTED_HASHES`.
+
+    This class can be converted to and from `ArchiveInfo`. While ArchiveInfo intends to
+    be JSON-serializable to conform to PEP 610, this class contains the logic for
+    parsing a hash name and value for correctness, and then checking whether that hash
+    conforms to a schema with `.is_hash_allowed()`."""
+
+    name: str
+    value: str
+
+    _hash_re = re.compile(
+        # NB: we do not validate that the second group (.*) is a valid hex
+        # digest. Instead, we simply keep that string in this class, and then check it
+        # against Hashes when hash-checking is needed. This is easier to debug than
+        # proactively discarding an invalid hex digest, as we handle incorrect hashes
+        # and malformed hashes in the same place.
+        r"({choices})=(.*)".format(
+            choices="|".join(re.escape(hash_name) for hash_name in _SUPPORTED_HASHES)
+        ),
+    )
+
+    def __post_init__(self) -> None:
+        assert self._hash_re.match(f"{self.name}={self.value}")
+
+    @classmethod
+    @functools.lru_cache(maxsize=None)
+    def split_hash_name_and_value(cls, url: str) -> Optional["LinkHash"]:
+        """Search a string for a checksum algorithm name and encoded output value."""
+        match = cls._hash_re.search(url)
+        if match is None:
+            return None
+        name, value = match.groups()
+        return cls(name=name, value=value)
+
+    def as_hashes(self) -> Hashes:
+        """Return a Hashes instance which checks only for the current hash."""
+        return Hashes({self.name: [self.value]})
+
+    def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
+        """
+        Return True if the current hash is allowed by `hashes`.
+        """
+        if hashes is None:
+            return False
+        return hashes.is_hash_allowed(self.name, hex_digest=self.value)
+
+
+def _clean_url_path_part(part: str) -> str:
+    """
+    Clean a "part" of a URL path (i.e. after splitting on "@" characters).
+    """
+    # We unquote prior to quoting to make sure nothing is double quoted.
+    return urllib.parse.quote(urllib.parse.unquote(part))
+
+
+def _clean_file_url_path(part: str) -> str:
+    """
+    Clean the first part of a URL path that corresponds to a local
+    filesystem path (i.e. the first part after splitting on "@" characters).
+    """
+    # We unquote prior to quoting to make sure nothing is double quoted.
+    # Also, on Windows the path part might contain a drive letter which
+    # should not be quoted. On Linux where drive letters do not
+    # exist, the colon should be quoted. We rely on urllib.request
+    # to do the right thing here.
+    return urllib.request.pathname2url(urllib.request.url2pathname(part))
+
+
+# Split on "@" and on "%2F" (the percent-encoding of "/").
+_reserved_chars_re = re.compile("(@|%2F)", re.IGNORECASE)
+
+
+def _clean_url_path(path: str, is_local_path: bool) -> str:
+    """
+    Clean the path portion of a URL.
+    """
+    if is_local_path:
+        clean_func = _clean_file_url_path
+    else:
+        clean_func = _clean_url_path_part
+
+    # Split on the reserved characters prior to cleaning so that
+    # revision strings in VCS URLs are properly preserved.
+    parts = _reserved_chars_re.split(path)
+
+    cleaned_parts = []
+    for to_clean, reserved in pairwise(itertools.chain(parts, [""])):
+        cleaned_parts.append(clean_func(to_clean))
+        # Normalize %xx escapes (e.g. %2f -> %2F)
+        cleaned_parts.append(reserved.upper())
+
+    return "".join(cleaned_parts)
+
+
+def _ensure_quoted_url(url: str) -> str:
+    """
+    Make sure a link is fully quoted.
+    For example, a space in the URL will be replaced with "%20", without
+    double-quoting characters that are already percent-encoded.
+    """
+    # Split the URL into parts according to the general structure
+    # `scheme://netloc/path;parameters?query#fragment`.
+    result = urllib.parse.urlparse(url)
+    # If the netloc is empty, then the URL refers to a local filesystem path.
+    is_local_path = not result.netloc
+    path = _clean_url_path(result.path, is_local_path=is_local_path)
+    return urllib.parse.urlunparse(result._replace(path=path))
+
+
+class Link(KeyBasedCompareMixin):
+    """Represents a parsed link from a Package Index's simple URL"""
+
+    __slots__ = [
+        "_parsed_url",
+        "_url",
+        "_hashes",
+        "comes_from",
+        "requires_python",
+        "yanked_reason",
+        "dist_info_metadata",
+        "link_hash",
+        "cache_link_parsing",
+    ]
+
+    def __init__(
+        self,
+        url: str,
+        comes_from: Optional[Union[str, "IndexContent"]] = None,
+        requires_python: Optional[str] = None,
+        yanked_reason: Optional[str] = None,
+        dist_info_metadata: Optional[str] = None,
+        link_hash: Optional[LinkHash] = None,
+        cache_link_parsing: bool = True,
+        hashes: Optional[Mapping[str, str]] = None,
+    ) -> None:
+        """
+        :param url: url of the resource pointed to (href of the link)
+        :param comes_from: instance of IndexContent where the link was found,
+            or string.
+        :param requires_python: String containing the `Requires-Python`
+            metadata field, specified in PEP 345. This may be specified by
+            a data-requires-python attribute in the HTML link tag, as
+            described in PEP 503.
+        :param yanked_reason: the reason the file has been yanked, if the
+            file has been yanked, or None if the file hasn't been yanked.
+            This is the value of the "data-yanked" attribute, if present, in
+            a simple repository HTML link. If the file has been yanked but
+            no reason was provided, this should be the empty string. See
+            PEP 592 for more information and the specification.
+        :param dist_info_metadata: the metadata attached to the file, or None if no such
+            metadata is provided. This is the value of the "data-dist-info-metadata"
+            attribute, if present, in a simple repository HTML link. This may be parsed
+            into its own `Link` by `self.metadata_link()`. See PEP 658 for more
+            information and the specification.
+        :param link_hash: a checksum for the content the link points to. If not
+            provided, this will be extracted from the link URL, if the URL has
+            any checksum.
+        :param cache_link_parsing: A flag that is used elsewhere to determine
+                                   whether resources retrieved from this link
+                                   should be cached. PyPI index urls should
+                                   generally have this set to False, for
+                                   example.
+        :param hashes: A mapping of hash names to digests to allow us to
+                       determine the validity of a download.
+        """
+
+        # The url can be a UNC Windows share.
+        if url.startswith("\\\\"):
+            url = path_to_url(url)
+
+        self._parsed_url = urllib.parse.urlsplit(url)
+        # Store the url as a private attribute to prevent accidentally
+        # trying to set a new value.
+        self._url = url
+        self._hashes = hashes if hashes is not None else {}
+
+        self.comes_from = comes_from
+        self.requires_python = requires_python if requires_python else None
+        self.yanked_reason = yanked_reason
+        self.dist_info_metadata = dist_info_metadata
+        self.link_hash = link_hash or LinkHash.split_hash_name_and_value(self._url)
+
+        super().__init__(key=url, defining_class=Link)
+
+        self.cache_link_parsing = cache_link_parsing
+
+    @classmethod
+    def from_json(
+        cls,
+        file_data: Dict[str, Any],
+        page_url: str,
+    ) -> Optional["Link"]:
+        """
+        Convert a PyPI JSON document from a simple repository page into a Link.
+        """
+        file_url = file_data.get("url")
+        if file_url is None:
+            return None
+
+        url = _ensure_quoted_url(urllib.parse.urljoin(page_url, file_url))
+        pyrequire = file_data.get("requires-python")
+        yanked_reason = file_data.get("yanked")
+        dist_info_metadata = file_data.get("dist-info-metadata")
+        hashes = file_data.get("hashes", {})
+
+        # Link.yanked_reason expects an empty string instead of a boolean.
+        if yanked_reason and not isinstance(yanked_reason, str):
+            yanked_reason = ""
+        # Link.yanked_reason expects None instead of False.
+        elif not yanked_reason:
+            yanked_reason = None
+
+        return cls(
+            url,
+            comes_from=page_url,
+            requires_python=pyrequire,
+            yanked_reason=yanked_reason,
+            hashes=hashes,
+            dist_info_metadata=dist_info_metadata,
+        )
+
+    @classmethod
+    def from_element(
+        cls,
+        anchor_attribs: Dict[str, Optional[str]],
+        page_url: str,
+        base_url: str,
+    ) -> Optional["Link"]:
+        """
+        Convert an anchor element's attributes in a simple repository page to a Link.
+        """
+        href = anchor_attribs.get("href")
+        if not href:
+            return None
+
+        url = _ensure_quoted_url(urllib.parse.urljoin(base_url, href))
+        pyrequire = anchor_attribs.get("data-requires-python")
+        yanked_reason = anchor_attribs.get("data-yanked")
+        dist_info_metadata = anchor_attribs.get("data-dist-info-metadata")
+
+        return cls(
+            url,
+            comes_from=page_url,
+            requires_python=pyrequire,
+            yanked_reason=yanked_reason,
+            dist_info_metadata=dist_info_metadata,
+        )
+
+    def __str__(self) -> str:
+        if self.requires_python:
+            rp = f" (requires-python:{self.requires_python})"
+        else:
+            rp = ""
+        if self.comes_from:
+            return "{} (from {}){}".format(
+                redact_auth_from_url(self._url), self.comes_from, rp
+            )
+        else:
+            return redact_auth_from_url(str(self._url))
+
+    def __repr__(self) -> str:
+        return f"<Link {self}>"
+
+    @property
+    def url(self) -> str:
+        return self._url
+
+    @property
+    def filename(self) -> str:
+        path = self.path.rstrip("/")
+        name = posixpath.basename(path)
+        if not name:
+            # Make sure we don't leak auth information if the netloc
+            # includes a username and password.
+            netloc, user_pass = split_auth_from_netloc(self.netloc)
+            return netloc
+
+        name = urllib.parse.unquote(name)
+        assert name, f"URL {self._url!r} produced no filename"
+        return name
+
+    @property
+    def file_path(self) -> str:
+        return url_to_path(self.url)
+
+    @property
+    def scheme(self) -> str:
+        return self._parsed_url.scheme
+
+    @property
+    def netloc(self) -> str:
+        """
+        This can contain auth information.
+        """
+        return self._parsed_url.netloc
+
+    @property
+    def path(self) -> str:
+        return urllib.parse.unquote(self._parsed_url.path)
+
+    def splitext(self) -> Tuple[str, str]:
+        return splitext(posixpath.basename(self.path.rstrip("/")))
+
+    @property
+    def ext(self) -> str:
+        return self.splitext()[1]
+
+    @property
+    def url_without_fragment(self) -> str:
+        scheme, netloc, path, query, fragment = self._parsed_url
+        return urllib.parse.urlunsplit((scheme, netloc, path, query, ""))
+
+    _egg_fragment_re = re.compile(r"[#&]egg=([^&]*)")
+
+    @property
+    def egg_fragment(self) -> Optional[str]:
+        match = self._egg_fragment_re.search(self._url)
+        if not match:
+            return None
+        return match.group(1)
+
+    _subdirectory_fragment_re = re.compile(r"[#&]subdirectory=([^&]*)")
+
+    @property
+    def subdirectory_fragment(self) -> Optional[str]:
+        match = self._subdirectory_fragment_re.search(self._url)
+        if not match:
+            return None
+        return match.group(1)
+
+    def metadata_link(self) -> Optional["Link"]:
+        """Implementation of PEP 658 parsing."""
+        # Note that the Link.dist_info_metadata attribute is typically set by
+        # Link.from_element(), which parses the "data-dist-info-metadata"
+        # attribute from an HTML anchor tag (see PEP 658).
+        if self.dist_info_metadata is None:
+            return None
+        metadata_url = f"{self.url_without_fragment}.metadata"
+        link_hash: Optional[LinkHash] = None
+        # If data-dist-info-metadata="true" is set, then the metadata file exists,
+        # but there is no information about its checksum or anything else.
+        if self.dist_info_metadata != "true":
+            link_hash = LinkHash.split_hash_name_and_value(self.dist_info_metadata)
+        return Link(metadata_url, link_hash=link_hash)
+
+    def as_hashes(self) -> Optional[Hashes]:
+        if self.link_hash is not None:
+            return self.link_hash.as_hashes()
+        return None
+
+    @property
+    def hash(self) -> Optional[str]:
+        if self.link_hash is not None:
+            return self.link_hash.value
+        return None
+
+    @property
+    def hash_name(self) -> Optional[str]:
+        if self.link_hash is not None:
+            return self.link_hash.name
+        return None
+
+    @property
+    def show_url(self) -> str:
+        return posixpath.basename(self._url.split("#", 1)[0].split("?", 1)[0])
+
+    @property
+    def is_file(self) -> bool:
+        return self.scheme == "file"
+
+    def is_existing_dir(self) -> bool:
+        return self.is_file and os.path.isdir(self.file_path)
+
+    @property
+    def is_wheel(self) -> bool:
+        return self.ext == WHEEL_EXTENSION
+
+    @property
+    def is_vcs(self) -> bool:
+        from pip._internal.vcs import vcs
+
+        return self.scheme in vcs.all_schemes
+
+    @property
+    def is_yanked(self) -> bool:
+        return self.yanked_reason is not None
+
+    @property
+    def has_hash(self) -> bool:
+        return self.link_hash is not None
+
+    def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
+        """
+        Return True if the link has a hash and it is allowed by `hashes`.
+        """
+        if self.link_hash is None:
+            return False
+        return self.link_hash.is_hash_allowed(hashes)
+
+
+class _CleanResult(NamedTuple):
+    """Convert link for equivalency check.
+
+    This is used in the resolver to check whether two URL-specified requirements
+    likely point to the same distribution and can be considered equivalent. This
+    equivalency logic avoids comparing URLs literally, which can be too strict
+    (e.g. "a=1&b=2" vs "b=2&a=1") and produce conflicts unexpecting to users.
+
+    Currently this does three things:
+
+    1. Drop the basic auth part. This is technically wrong since a server can
+       serve different content based on auth, but if it does that, it is even
+       impossible to guarantee two URLs without auth are equivalent, since
+       the user can input different auth information when prompted. So the
+       practical solution is to assume the auth doesn't affect the response.
+    2. Parse the query to avoid the ordering issue. Note that the ordering of
+       values under the same key is NOT normalized; i.e. "a=1&a=2" and
+       "a=2&a=1" are still considered different.
+    3. Explicitly drop most of the fragment part, except ``subdirectory=`` and
+       hash values, since it should have no impact on the downloaded content.
+       Note that this drops the "egg=" part historically used to denote the
+       requested project (and extras), which is wrong in the strictest sense,
+       but so many people supply it inconsistently that it causes superfluous
+       resolution conflicts, so we choose to ignore it as well.
+    """
+
+    parsed: urllib.parse.SplitResult
+    query: Dict[str, List[str]]
+    subdirectory: str
+    hashes: Dict[str, str]
+
+
+def _clean_link(link: Link) -> _CleanResult:
+    parsed = link._parsed_url
+    netloc = parsed.netloc.rsplit("@", 1)[-1]
+    # According to RFC 8089, an empty host in file: means localhost.
+    if parsed.scheme == "file" and not netloc:
+        netloc = "localhost"
+    fragment = urllib.parse.parse_qs(parsed.fragment)
+    if "egg" in fragment:
+        logger.debug("Ignoring egg= fragment in %s", link)
+    try:
+        # If there are multiple subdirectory values, use the first one.
+        # This matches the behavior of Link.subdirectory_fragment.
+        subdirectory = fragment["subdirectory"][0]
+    except (IndexError, KeyError):
+        subdirectory = ""
+    # If there are multiple hash values under the same algorithm, use the
+    # first one. This matches the behavior of Link.hash.
+    hashes = {k: fragment[k][0] for k in _SUPPORTED_HASHES if k in fragment}
+    return _CleanResult(
+        parsed=parsed._replace(netloc=netloc, query="", fragment=""),
+        query=urllib.parse.parse_qs(parsed.query),
+        subdirectory=subdirectory,
+        hashes=hashes,
+    )
+
+
+@functools.lru_cache(maxsize=None)
+def links_equivalent(link1: Link, link2: Link) -> bool:
+    return _clean_link(link1) == _clean_link(link2)
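
Note: a sketch of the two Link behaviours above that are easiest to get wrong,
using hypothetical URLs: fragment hashes become a LinkHash, and equivalency
ignores query-parameter ordering and the egg= fragment:

    from pip._internal.models.link import Link, links_equivalent

    wheel = Link("https://example.com/demo-1.0-py3-none-any.whl#sha256=abc123")
    assert wheel.is_wheel and wheel.hash_name == "sha256"

    a = Link("https://example.com/pkg.tar.gz?a=1&b=2#egg=pkg")
    b = Link("https://example.com/pkg.tar.gz?b=2&a=1")
    assert links_equivalent(a, b)
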
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/models/scheme.py b/venv/lib/python3.9/site-packages/pip/_internal/models/scheme.py
new file mode 100644
index 0000000..f51190a
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/models/scheme.py
@@ -0,0 +1,31 @@
+"""
+For types associated with installation schemes.
+
+For a general overview of available schemes and their context, see
+https://docs.python.org/3/install/index.html#alternate-installation.
+"""
+
+
+SCHEME_KEYS = ["platlib", "purelib", "headers", "scripts", "data"]
+
+
+class Scheme:
+    """A Scheme holds paths which are used as the base directories for
+    artifacts associated with a Python package.
+    """
+
+    __slots__ = SCHEME_KEYS
+
+    def __init__(
+        self,
+        platlib: str,
+        purelib: str,
+        headers: str,
+        scripts: str,
+        data: str,
+    ) -> None:
+        self.platlib = platlib
+        self.purelib = purelib
+        self.headers = headers
+        self.scripts = scripts
+        self.data = data
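
Note: Scheme is a plain bag of five base directories; a sketch with
hypothetical paths:

    from pip._internal.models.scheme import SCHEME_KEYS, Scheme

    scheme = Scheme(
        platlib="/tmp/lib", purelib="/tmp/lib", headers="/tmp/include",
        scripts="/tmp/bin", data="/tmp",
    )
    assert all(getattr(scheme, key) for key in SCHEME_KEYS)
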
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/models/search_scope.py b/venv/lib/python3.9/site-packages/pip/_internal/models/search_scope.py
new file mode 100644
index 0000000..a64af73
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/models/search_scope.py
@@ -0,0 +1,133 @@
+import itertools
+import logging
+import os
+import posixpath
+import urllib.parse
+from typing import List
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.models.index import PyPI
+from pip._internal.utils.compat import has_tls
+from pip._internal.utils.misc import normalize_path, redact_auth_from_url
+
+logger = logging.getLogger(__name__)
+
+
+class SearchScope:
+
+    """
+    Encapsulates the locations that pip is configured to search.
+    """
+
+    __slots__ = ["find_links", "index_urls", "no_index"]
+
+    @classmethod
+    def create(
+        cls,
+        find_links: List[str],
+        index_urls: List[str],
+        no_index: bool,
+    ) -> "SearchScope":
+        """
+        Create a SearchScope object after normalizing the `find_links`.
+        """
+        # Build find_links. If an argument starts with ~, it may be
+        # a local file relative to a home directory. So try normalizing
+        # it and if it exists, use the normalized version.
+        # This is deliberately conservative - it might be fine just to
+        # blindly normalize anything starting with a ~...
+        built_find_links: List[str] = []
+        for link in find_links:
+            if link.startswith("~"):
+                new_link = normalize_path(link)
+                if os.path.exists(new_link):
+                    link = new_link
+            built_find_links.append(link)
+
+        # If we don't have TLS enabled, warn if any location we're configured
+        # to search relies on TLS.
+        if not has_tls():
+            for link in itertools.chain(index_urls, built_find_links):
+                parsed = urllib.parse.urlparse(link)
+                if parsed.scheme == "https":
+                    logger.warning(
+                        "pip is configured with locations that require "
+                        "TLS/SSL, however the ssl module in Python is not "
+                        "available."
+                    )
+                    break
+
+        return cls(
+            find_links=built_find_links,
+            index_urls=index_urls,
+            no_index=no_index,
+        )
+
+    def __init__(
+        self,
+        find_links: List[str],
+        index_urls: List[str],
+        no_index: bool,
+    ) -> None:
+        self.find_links = find_links
+        self.index_urls = index_urls
+        self.no_index = no_index
+
+    def get_formatted_locations(self) -> str:
+        lines = []
+        redacted_index_urls = []
+        if self.index_urls and self.index_urls != [PyPI.simple_url]:
+            for url in self.index_urls:
+
+                redacted_index_url = redact_auth_from_url(url)
+
+                # Parse the URL
+                purl = urllib.parse.urlsplit(redacted_index_url)
+
+                # A URL is generally invalid if scheme and netloc are missing;
+                # there are issues with Python's URL parsing, so this test is
+                # a bit crude. See bpo-20271, bpo-23505. Python doesn't always
+                # parse invalid URLs correctly; ideally it would raise
+                # exceptions for malformed URLs.
+                if not purl.scheme and not purl.netloc:
+                    logger.warning(
+                        'The index url "%s" seems invalid, please provide a scheme.',
+                        redacted_index_url,
+                    )
+
+                redacted_index_urls.append(redacted_index_url)
+
+            lines.append(
+                "Looking in indexes: {}".format(", ".join(redacted_index_urls))
+            )
+
+        if self.find_links:
+            lines.append(
+                "Looking in links: {}".format(
+                    ", ".join(redact_auth_from_url(url) for url in self.find_links)
+                )
+            )
+        return "\n".join(lines)
+
+    def get_index_urls_locations(self, project_name: str) -> List[str]:
+        """Returns the locations found via self.index_urls
+
+        Checks the url_name on the main (first in the list) index and
+        uses this url_name to produce all locations
+        """
+
+        def mkurl_pypi_url(url: str) -> str:
+            loc = posixpath.join(
+                url, urllib.parse.quote(canonicalize_name(project_name))
+            )
+            # For maximum compatibility with easy_install, ensure the path
+            # ends in a trailing slash.  Although this isn't in the spec
+            # (and PyPI can handle it without the slash) some other index
+            # implementations might break if they relied on easy_install's
+            # behavior.
+            if not loc.endswith("/"):
+                loc = loc + "/"
+            return loc
+
+        return [mkurl_pypi_url(url) for url in self.index_urls]
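
Note: project URLs on the simple API are canonicalized and slash-terminated,
per mkurl_pypi_url above; a sketch with a hypothetical project name:

    from pip._internal.models.search_scope import SearchScope

    scope = SearchScope.create(
        find_links=[], index_urls=["https://pypi.org/simple/"], no_index=False
    )
    assert scope.get_index_urls_locations("My_Project") == [
        "https://pypi.org/simple/my-project/"
    ]
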
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/models/selection_prefs.py b/venv/lib/python3.9/site-packages/pip/_internal/models/selection_prefs.py
new file mode 100644
index 0000000..977bc4c
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/models/selection_prefs.py
@@ -0,0 +1,51 @@
+from typing import Optional
+
+from pip._internal.models.format_control import FormatControl
+
+
+class SelectionPreferences:
+    """
+    Encapsulates the candidate selection preferences for downloading
+    and installing files.
+    """
+
+    __slots__ = [
+        "allow_yanked",
+        "allow_all_prereleases",
+        "format_control",
+        "prefer_binary",
+        "ignore_requires_python",
+    ]
+
+    # Don't include an allow_yanked default value to make sure each call
+    # site considers whether yanked releases are allowed. This also causes
+    # that decision to be made explicit in the calling code, which helps
+    # people when reading the code.
+    def __init__(
+        self,
+        allow_yanked: bool,
+        allow_all_prereleases: bool = False,
+        format_control: Optional[FormatControl] = None,
+        prefer_binary: bool = False,
+        ignore_requires_python: Optional[bool] = None,
+    ) -> None:
+        """Create a SelectionPreferences object.
+
+        :param allow_yanked: Whether files marked as yanked (in the sense
+            of PEP 592) are permitted to be candidates for install.
+        :param allow_all_prereleases: Whether to allow all pre-release and
+            development versions of packages to be candidates.
+        :param format_control: A FormatControl object or None. Used to control
+            the selection of source packages / binary packages when consulting
+            the index and links.
+        :param prefer_binary: Whether to prefer an old, but valid, binary
+            dist over a new source dist.
+        :param ignore_requires_python: Whether to ignore incompatible
+            "Requires-Python" values in links. Defaults to False.
+        """
+        if ignore_requires_python is None:
+            ignore_requires_python = False
+
+        self.allow_yanked = allow_yanked
+        self.allow_all_prereleases = allow_all_prereleases
+        self.format_control = format_control
+        self.prefer_binary = prefer_binary
+        self.ignore_requires_python = ignore_requires_python
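+
+
+if __name__ == "__main__":
+    # Editor's sketch, not part of pip: allow_yanked deliberately has no
+    # default, so every call site must state its yanked-file policy.
+    prefs = SelectionPreferences(allow_yanked=False, prefer_binary=True)
+    print(prefs.allow_yanked, prefs.prefer_binary, prefs.ignore_requires_python)
+    # -> False True False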
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/models/target_python.py b/venv/lib/python3.9/site-packages/pip/_internal/models/target_python.py
new file mode 100644
index 0000000..744bd7e
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/models/target_python.py
@@ -0,0 +1,110 @@
+import sys
+from typing import List, Optional, Tuple
+
+from pip._vendor.packaging.tags import Tag
+
+from pip._internal.utils.compatibility_tags import get_supported, version_info_to_nodot
+from pip._internal.utils.misc import normalize_version_info
+
+
+class TargetPython:
+
+    """
+    Encapsulates the properties of a Python interpreter one is targeting
+    for a package install, download, etc.
+    """
+
+    __slots__ = [
+        "_given_py_version_info",
+        "abis",
+        "implementation",
+        "platforms",
+        "py_version",
+        "py_version_info",
+        "_valid_tags",
+    ]
+
+    def __init__(
+        self,
+        platforms: Optional[List[str]] = None,
+        py_version_info: Optional[Tuple[int, ...]] = None,
+        abis: Optional[List[str]] = None,
+        implementation: Optional[str] = None,
+    ) -> None:
+        """
+        :param platforms: A list of strings or None. If None, searches for
+            packages that are supported by the current system. Otherwise, will
+            find packages that can be built on the platforms passed in. These
+            packages will only be downloaded for distribution: they will
+            not be built locally.
+        :param py_version_info: An optional tuple of ints representing the
+            Python version information to use (e.g. `sys.version_info[:3]`).
+            This can have length 1, 2, or 3 when provided.
+        :param abis: A list of strings or None. This is passed to
+            compatibility_tags.py's get_supported() function as is.
+        :param implementation: A string or None. This is passed to
+            compatibility_tags.py's get_supported() function as is.
+        """
+        # Store the given py_version_info for when we call get_supported().
+        self._given_py_version_info = py_version_info
+
+        if py_version_info is None:
+            py_version_info = sys.version_info[:3]
+        else:
+            py_version_info = normalize_version_info(py_version_info)
+
+        py_version = ".".join(map(str, py_version_info[:2]))
+
+        self.abis = abis
+        self.implementation = implementation
+        self.platforms = platforms
+        self.py_version = py_version
+        self.py_version_info = py_version_info
+
+        # This is used to cache the return value of get_tags().
+        self._valid_tags: Optional[List[Tag]] = None
+
+    def format_given(self) -> str:
+        """
+        Format the given, non-None attributes for display.
+        """
+        display_version = None
+        if self._given_py_version_info is not None:
+            display_version = ".".join(
+                str(part) for part in self._given_py_version_info
+            )
+
+        key_values = [
+            ("platforms", self.platforms),
+            ("version_info", display_version),
+            ("abis", self.abis),
+            ("implementation", self.implementation),
+        ]
+        return " ".join(
+            f"{key}={value!r}" for key, value in key_values if value is not None
+        )
+
+    def get_tags(self) -> List[Tag]:
+        """
+        Return the supported PEP 425 tags to check wheel candidates against.
+
+        The tags are returned in order of preference (most preferred first).
+        """
+        if self._valid_tags is None:
+            # Pass version=None if no py_version_info was given since
+            # version=None uses special default logic.
+            py_version_info = self._given_py_version_info
+            if py_version_info is None:
+                version = None
+            else:
+                version = version_info_to_nodot(py_version_info)
+
+            tags = get_supported(
+                version=version,
+                platforms=self.platforms,
+                abis=self.abis,
+                impl=self.implementation,
+            )
+            self._valid_tags = tags
+
+        return self._valid_tags
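+
+
+if __name__ == "__main__":
+    # Editor's sketch, not part of pip: pin a target interpreter version
+    # and inspect the tag preferences derived from it.
+    tp = TargetPython(py_version_info=(3, 9))
+    print(tp.py_version)      # -> 3.9
+    print(tp.format_given())  # -> version_info='3.9'
+    print(tp.get_tags()[0])   # most preferred PEP 425 tag, e.g. cp39-cp39-...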
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/models/wheel.py b/venv/lib/python3.9/site-packages/pip/_internal/models/wheel.py
new file mode 100644
index 0000000..a5dc12b
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/models/wheel.py
@@ -0,0 +1,92 @@
+"""Represents a wheel file and provides access to the various parts of the
+name that have meaning.
+"""
+import re
+from typing import Dict, Iterable, List
+
+from pip._vendor.packaging.tags import Tag
+
+from pip._internal.exceptions import InvalidWheelFilename
+
+
+class Wheel:
+    """A wheel file"""
+
+    wheel_file_re = re.compile(
+        r"""^(?P<namever>(?P<name>[^\s-]+?)-(?P<ver>[^\s-]*?))
+        ((-(?P<build>\d[^-]*?))?-(?P<pyver>[^\s-]+?)-(?P<abi>[^\s-]+?)-(?P<plat>[^\s-]+?)
+        \.whl|\.dist-info)$""",
+        re.VERBOSE,
+    )
+
+    def __init__(self, filename: str) -> None:
+        """
+        :raises InvalidWheelFilename: when the filename is invalid for a wheel
+        """
+        wheel_info = self.wheel_file_re.match(filename)
+        if not wheel_info:
+            raise InvalidWheelFilename(f"{filename} is not a valid wheel filename.")
+        self.filename = filename
+        self.name = wheel_info.group("name").replace("_", "-")
+        # we'll assume "_" means "-" due to wheel naming scheme
+        # (https://github.com/pypa/pip/issues/1150)
+        self.version = wheel_info.group("ver").replace("_", "-")
+        self.build_tag = wheel_info.group("build")
+        self.pyversions = wheel_info.group("pyver").split(".")
+        self.abis = wheel_info.group("abi").split(".")
+        self.plats = wheel_info.group("plat").split(".")
+
+        # All the tag combinations from this file
+        self.file_tags = {
+            Tag(x, y, z) for x in self.pyversions for y in self.abis for z in self.plats
+        }
+
+    def get_formatted_file_tags(self) -> List[str]:
+        """Return the wheel's tags as a sorted list of strings."""
+        return sorted(str(tag) for tag in self.file_tags)
+
+    def support_index_min(self, tags: List[Tag]) -> int:
+        """Return the lowest index that one of the wheel's file_tag combinations
+        achieves in the given list of supported tags.
+
+        For example, if there are 8 supported tags and one of the file tags
+        is first in the list, then return 0.
+
+        :param tags: the PEP 425 tags to check the wheel against, in order
+            with most preferred first.
+
+        :raises ValueError: If none of the wheel's file tags match one of
+            the supported tags.
+        """
+        try:
+            return next(i for i, t in enumerate(tags) if t in self.file_tags)
+        except StopIteration:
+            raise ValueError()
+
+    def find_most_preferred_tag(
+        self, tags: List[Tag], tag_to_priority: Dict[Tag, int]
+    ) -> int:
+        """Return the priority of the most preferred tag that one of the wheel's file
+        tag combinations achieves in the given list of supported tags using the given
+        tag_to_priority mapping, where lower priorities are more-preferred.
+
+        This is used in place of support_index_min in some cases in order to avoid
+        an expensive linear scan of a large list of tags.
+
+        :param tags: the PEP 425 tags to check the wheel against.
+        :param tag_to_priority: a mapping from tag to priority of that tag, where
+            lower is more preferred.
+
+        :raises ValueError: If none of the wheel's file tags match one of
+            the supported tags.
+        """
+        return min(
+            tag_to_priority[tag] for tag in self.file_tags if tag in tag_to_priority
+        )
+
+    def supported(self, tags: Iterable[Tag]) -> bool:
+        """Return whether the wheel is compatible with one of the given tags.
+
+        :param tags: the PEP 425 tags to check the wheel against.
+        """
+        return not self.file_tags.isdisjoint(tags)
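+
+
+if __name__ == "__main__":
+    # Editor's sketch, not part of pip: parsing a wheel filename into its
+    # name/version/tag parts. The filename below is only an example.
+    w = Wheel("pip-22.0.4-py3-none-any.whl")
+    print(w.name, w.version)            # -> pip 22.0.4
+    print(w.get_formatted_file_tags())  # -> ['py3-none-any']
+    print(w.supported([Tag("py3", "none", "any")]))          # -> True
+    print(w.support_index_min([Tag("py3", "none", "any")]))  # -> 0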
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/network/__init__.py b/venv/lib/python3.9/site-packages/pip/_internal/network/__init__.py
new file mode 100644
index 0000000..b51bde9
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/network/__init__.py
@@ -0,0 +1,2 @@
+"""Contains purely network-related utilities.
+"""
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/__init__.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..ea838d8
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/__init__.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/auth.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/auth.cpython-39.pyc
new file mode 100644
index 0000000..cb7e04d
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/auth.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/cache.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/cache.cpython-39.pyc
new file mode 100644
index 0000000..055de3b
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/cache.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/download.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/download.cpython-39.pyc
new file mode 100644
index 0000000..d59c2ff
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/download.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-39.pyc
new file mode 100644
index 0000000..0316e5e
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/session.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/session.cpython-39.pyc
new file mode 100644
index 0000000..470997a
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/session.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/utils.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/utils.cpython-39.pyc
new file mode 100644
index 0000000..c20768b
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/utils.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-39.pyc
new file mode 100644
index 0000000..ff2ff04
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/network/auth.py b/venv/lib/python3.9/site-packages/pip/_internal/network/auth.py
new file mode 100644
index 0000000..ca42798
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/network/auth.py
@@ -0,0 +1,323 @@
+"""Network Authentication Helpers
+
+Contains interface (MultiDomainBasicAuth) and associated glue code for
+providing credentials in the context of network requests.
+"""
+
+import urllib.parse
+from typing import Any, Dict, List, Optional, Tuple
+
+from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
+from pip._vendor.requests.models import Request, Response
+from pip._vendor.requests.utils import get_netrc_auth
+
+from pip._internal.utils.logging import getLogger
+from pip._internal.utils.misc import (
+    ask,
+    ask_input,
+    ask_password,
+    remove_auth_from_url,
+    split_auth_netloc_from_url,
+)
+from pip._internal.vcs.versioncontrol import AuthInfo
+
+logger = getLogger(__name__)
+
+Credentials = Tuple[str, str, str]
+
+try:
+    import keyring
+except ImportError:
+    keyring = None  # type: ignore[assignment]
+except Exception as exc:
+    logger.warning(
+        "Keyring is skipped due to an exception: %s",
+        str(exc),
+    )
+    keyring = None  # type: ignore[assignment]
+
+
+def get_keyring_auth(url: Optional[str], username: Optional[str]) -> Optional[AuthInfo]:
+    """Return the tuple auth for a given url from keyring."""
+    global keyring
+    if not url or not keyring:
+        return None
+
+    try:
+        try:
+            get_credential = keyring.get_credential
+        except AttributeError:
+            pass
+        else:
+            logger.debug("Getting credentials from keyring for %s", url)
+            cred = get_credential(url, username)
+            if cred is not None:
+                return cred.username, cred.password
+            return None
+
+        if username:
+            logger.debug("Getting password from keyring for %s", url)
+            password = keyring.get_password(url, username)
+            if password:
+                return username, password
+
+    except Exception as exc:
+        logger.warning(
+            "Keyring is skipped due to an exception: %s",
+            str(exc),
+        )
+        keyring = None  # type: ignore[assignment]
+    return None
+
+
+class MultiDomainBasicAuth(AuthBase):
+    def __init__(
+        self, prompting: bool = True, index_urls: Optional[List[str]] = None
+    ) -> None:
+        self.prompting = prompting
+        self.index_urls = index_urls
+        self.passwords: Dict[str, AuthInfo] = {}
+        # When the user is prompted to enter credentials and keyring is
+        # available, we will offer to save them. If the user accepts,
+        # this value is set to the credentials they entered. After the
+        # request authenticates, the caller should call
+        # ``save_credentials`` to save these.
+        self._credentials_to_save: Optional[Credentials] = None
+
+    def _get_index_url(self, url: str) -> Optional[str]:
+        """Return the original index URL matching the requested URL.
+
+        Cached or dynamically generated credentials may work against
+        the original index URL rather than just the netloc.
+
+        The provided url should have had its username and password
+        removed already. If the original index url had credentials then
+        they will be included in the return value.
+
+        Returns None if no matching index was found, or if --no-index
+        was specified by the user.
+        """
+        if not url or not self.index_urls:
+            return None
+
+        for u in self.index_urls:
+            prefix = remove_auth_from_url(u).rstrip("/") + "/"
+            if url.startswith(prefix):
+                return u
+        return None
+
+    def _get_new_credentials(
+        self,
+        original_url: str,
+        allow_netrc: bool = True,
+        allow_keyring: bool = False,
+    ) -> AuthInfo:
+        """Find and return credentials for the specified URL."""
+        # Split the credentials and netloc from the url.
+        url, netloc, url_user_password = split_auth_netloc_from_url(
+            original_url,
+        )
+
+        # Start with the credentials embedded in the url
+        username, password = url_user_password
+        if username is not None and password is not None:
+            logger.debug("Found credentials in url for %s", netloc)
+            return url_user_password
+
+        # Find a matching index url for this request
+        index_url = self._get_index_url(url)
+        if index_url:
+            # Split the credentials from the url.
+            index_info = split_auth_netloc_from_url(index_url)
+            if index_info:
+                index_url, _, index_url_user_password = index_info
+                logger.debug("Found index url %s", index_url)
+
+        # If an index URL was found, try its embedded credentials
+        if index_url and index_url_user_password[0] is not None:
+            username, password = index_url_user_password
+            if username is not None and password is not None:
+                logger.debug("Found credentials in index url for %s", netloc)
+                return index_url_user_password
+
+        # Get creds from netrc if we still don't have them
+        if allow_netrc:
+            netrc_auth = get_netrc_auth(original_url)
+            if netrc_auth:
+                logger.debug("Found credentials in netrc for %s", netloc)
+                return netrc_auth
+
+        # If we don't have a password and keyring is available, use it.
+        if allow_keyring:
+            # The index url is more specific than the netloc, so try it first
+            # fmt: off
+            kr_auth = (
+                get_keyring_auth(index_url, username) or
+                get_keyring_auth(netloc, username)
+            )
+            # fmt: on
+            if kr_auth:
+                logger.debug("Found credentials in keyring for %s", netloc)
+                return kr_auth
+
+        return username, password
+
+    def _get_url_and_credentials(
+        self, original_url: str
+    ) -> Tuple[str, Optional[str], Optional[str]]:
+        """Return the credentials to use for the provided URL.
+
+        If allowed, netrc and keyring may be used to obtain the
+        correct credentials.
+
+        Returns (url_without_credentials, username, password). Note
+        that even if the original URL contains credentials, this
+        function may return a different username and password.
+        """
+        url, netloc, _ = split_auth_netloc_from_url(original_url)
+
+        # Try to get credentials from original url
+        username, password = self._get_new_credentials(original_url)
+
+        # If credentials not found, use any stored credentials for this netloc.
+        # Do this if either the username or the password is missing.
+        # This accounts for the situation in which the user has specified
+        # the username in the index url, but the password comes from keyring.
+        if (username is None or password is None) and netloc in self.passwords:
+            un, pw = self.passwords[netloc]
+            # It is possible that the cached credentials are for a different username,
+            # in which case the cache should be ignored.
+            if username is None or username == un:
+                username, password = un, pw
+
+        if username is not None or password is not None:
+            # Convert the username and password if they're None, so that
+            # this netloc will show up as "cached" in the conditional above.
+            # Further, HTTPBasicAuth doesn't accept None, so it makes sense to
+            # cache the value that is going to be used.
+            username = username or ""
+            password = password or ""
+
+            # Store any acquired credentials.
+            self.passwords[netloc] = (username, password)
+
+        assert (
+            # Credentials were found
+            (username is not None and password is not None)
+            # Credentials were not found
+            or (username is None and password is None)
+        ), f"Could not load credentials from url: {original_url}"
+
+        return url, username, password
+
+    def __call__(self, req: Request) -> Request:
+        # Get credentials for this request
+        url, username, password = self._get_url_and_credentials(req.url)
+
+        # Set the url of the request to the url without any credentials
+        req.url = url
+
+        if username is not None and password is not None:
+            # Send the basic auth with this request
+            req = HTTPBasicAuth(username, password)(req)
+
+        # Attach a hook to handle 401 responses
+        req.register_hook("response", self.handle_401)
+
+        return req
+
+    # Factored out to allow for easy patching in tests
+    def _prompt_for_password(
+        self, netloc: str
+    ) -> Tuple[Optional[str], Optional[str], bool]:
+        username = ask_input(f"User for {netloc}: ")
+        if not username:
+            return None, None, False
+        auth = get_keyring_auth(netloc, username)
+        if auth and auth[0] is not None and auth[1] is not None:
+            return auth[0], auth[1], False
+        password = ask_password("Password: ")
+        return username, password, True
+
+    # Factored out to allow for easy patching in tests
+    def _should_save_password_to_keyring(self) -> bool:
+        if not keyring:
+            return False
+        return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y"
+
+    def handle_401(self, resp: Response, **kwargs: Any) -> Response:
+        # We only care about 401 responses; anything else we just pass
+        # through to the caller unchanged.
+        if resp.status_code != 401:
+            return resp
+
+        # We are not able to prompt the user so simply return the response
+        if not self.prompting:
+            return resp
+
+        parsed = urllib.parse.urlparse(resp.url)
+
+        # Query the keyring for credentials:
+        username, password = self._get_new_credentials(
+            resp.url,
+            allow_netrc=False,
+            allow_keyring=True,
+        )
+
+        # Prompt the user for a new username and password
+        save = False
+        if not username and not password:
+            username, password, save = self._prompt_for_password(parsed.netloc)
+
+        # Store the new username and password to use for future requests
+        self._credentials_to_save = None
+        if username is not None and password is not None:
+            self.passwords[parsed.netloc] = (username, password)
+
+            # Prompt to save the password to keyring
+            if save and self._should_save_password_to_keyring():
+                self._credentials_to_save = (parsed.netloc, username, password)
+
+        # Consume content and release the original connection to allow our new
+        #   request to reuse the same one.
+        resp.content
+        resp.raw.release_conn()
+
+        # Add our new username and password to the request
+        req = HTTPBasicAuth(username or "", password or "")(resp.request)
+        req.register_hook("response", self.warn_on_401)
+
+        # On successful request, save the credentials that were used to
+        # keyring. (Note that if the user responded "no" above, this member
+        # is not set and nothing will be saved.)
+        if self._credentials_to_save:
+            req.register_hook("response", self.save_credentials)
+
+        # Send our new request
+        new_resp = resp.connection.send(req, **kwargs)
+        new_resp.history.append(resp)
+
+        return new_resp
+
+    def warn_on_401(self, resp: Response, **kwargs: Any) -> None:
+        """Response callback to warn about incorrect credentials."""
+        if resp.status_code == 401:
+            logger.warning(
+                "401 Error, Credentials not correct for %s",
+                resp.request.url,
+            )
+
+    def save_credentials(self, resp: Response, **kwargs: Any) -> None:
+        """Response callback to save credentials on success."""
+        assert keyring is not None, "should never reach here without keyring"
+        if not keyring:
+            return
+
+        creds = self._credentials_to_save
+        self._credentials_to_save = None
+        if creds and resp.status_code < 400:
+            try:
+                logger.info("Saving credentials to keyring")
+                keyring.set_password(*creds)
+            except Exception:
+                logger.exception("Failed to save credentials")
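+
+
+if __name__ == "__main__":
+    # Editor's sketch, not part of pip: credentials embedded in a URL are
+    # stripped from the request URL and cached per netloc. The URL below
+    # is only an illustrative input; no request is sent.
+    auth = MultiDomainBasicAuth(prompting=False)
+    url, user, password = auth._get_url_and_credentials(
+        "https://user:secret@example.com/simple/"
+    )
+    print(url)             # -> https://example.com/simple/
+    print(user, password)  # -> user secret
+    print(auth.passwords)  # -> {'example.com': ('user', 'secret')}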
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/network/cache.py b/venv/lib/python3.9/site-packages/pip/_internal/network/cache.py
new file mode 100644
index 0000000..a81a239
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/network/cache.py
@@ -0,0 +1,69 @@
+"""HTTP cache implementation.
+"""
+
+import os
+from contextlib import contextmanager
+from typing import Generator, Optional
+
+from pip._vendor.cachecontrol.cache import BaseCache
+from pip._vendor.cachecontrol.caches import FileCache
+from pip._vendor.requests.models import Response
+
+from pip._internal.utils.filesystem import adjacent_tmp_file, replace
+from pip._internal.utils.misc import ensure_dir
+
+
+def is_from_cache(response: Response) -> bool:
+    return getattr(response, "from_cache", False)
+
+
+@contextmanager
+def suppressed_cache_errors() -> Generator[None, None, None]:
+    """If we can't access the cache then we can just skip caching and process
+    requests as if caching wasn't enabled.
+    """
+    try:
+        yield
+    except OSError:
+        pass
+
+
+class SafeFileCache(BaseCache):
+    """
+    A file based cache which is safe to use even when the target directory may
+    not be accessible or writable.
+    """
+
+    def __init__(self, directory: str) -> None:
+        assert directory is not None, "Cache directory must not be None."
+        super().__init__()
+        self.directory = directory
+
+    def _get_cache_path(self, name: str) -> str:
+        # From cachecontrol.caches.file_cache.FileCache._fn, brought into our
+        # class for backwards-compatibility and to avoid using a non-public
+        # method.
+        hashed = FileCache.encode(name)
+        parts = list(hashed[:5]) + [hashed]
+        return os.path.join(self.directory, *parts)
+
+    def get(self, key: str) -> Optional[bytes]:
+        path = self._get_cache_path(key)
+        with suppressed_cache_errors():
+            with open(path, "rb") as f:
+                return f.read()
+
+    def set(self, key: str, value: bytes, expires: Optional[int] = None) -> None:
+        path = self._get_cache_path(key)
+        with suppressed_cache_errors():
+            ensure_dir(os.path.dirname(path))
+
+            with adjacent_tmp_file(path) as f:
+                f.write(value)
+
+            replace(f.name, path)
+
+    def delete(self, key: str) -> None:
+        path = self._get_cache_path(key)
+        with suppressed_cache_errors():
+            os.remove(path)
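+
+
+if __name__ == "__main__":
+    # Editor's sketch, not part of pip: SafeFileCache turns I/O failures
+    # (missing keys, unwritable directories) into cache misses instead of
+    # exceptions.
+    import tempfile
+
+    cache = SafeFileCache(tempfile.mkdtemp())
+    print(cache.get("key"))   # -> None (nothing cached yet)
+    cache.set("key", b"value")
+    print(cache.get("key"))   # -> b'value'
+    cache.delete("key")
+    print(cache.get("key"))   # -> None again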
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/network/download.py b/venv/lib/python3.9/site-packages/pip/_internal/network/download.py
new file mode 100644
index 0000000..79b82a5
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/network/download.py
@@ -0,0 +1,186 @@
+"""Download files with progress indicators.
+"""
+import email.message
+import logging
+import mimetypes
+import os
+from typing import Iterable, Optional, Tuple
+
+from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
+
+from pip._internal.cli.progress_bars import get_download_progress_renderer
+from pip._internal.exceptions import NetworkConnectionError
+from pip._internal.models.index import PyPI
+from pip._internal.models.link import Link
+from pip._internal.network.cache import is_from_cache
+from pip._internal.network.session import PipSession
+from pip._internal.network.utils import HEADERS, raise_for_status, response_chunks
+from pip._internal.utils.misc import format_size, redact_auth_from_url, splitext
+
+logger = logging.getLogger(__name__)
+
+
+def _get_http_response_size(resp: Response) -> Optional[int]:
+    try:
+        return int(resp.headers["content-length"])
+    except (ValueError, KeyError, TypeError):
+        return None
+
+
+def _prepare_download(
+    resp: Response,
+    link: Link,
+    progress_bar: str,
+) -> Iterable[bytes]:
+    total_length = _get_http_response_size(resp)
+
+    if link.netloc == PyPI.file_storage_domain:
+        url = link.show_url
+    else:
+        url = link.url_without_fragment
+
+    logged_url = redact_auth_from_url(url)
+
+    if total_length:
+        logged_url = "{} ({})".format(logged_url, format_size(total_length))
+
+    if is_from_cache(resp):
+        logger.info("Using cached %s", logged_url)
+    else:
+        logger.info("Downloading %s", logged_url)
+
+    if logger.getEffectiveLevel() > logging.INFO:
+        show_progress = False
+    elif is_from_cache(resp):
+        show_progress = False
+    elif not total_length:
+        show_progress = True
+    elif total_length > (40 * 1000):
+        show_progress = True
+    else:
+        show_progress = False
+
+    chunks = response_chunks(resp, CONTENT_CHUNK_SIZE)
+
+    if not show_progress:
+        return chunks
+
+    renderer = get_download_progress_renderer(bar_type=progress_bar, size=total_length)
+    return renderer(chunks)
+
+
+def sanitize_content_filename(filename: str) -> str:
+    """
+    Sanitize the "filename" value from a Content-Disposition header.
+    """
+    return os.path.basename(filename)
+
+
+def parse_content_disposition(content_disposition: str, default_filename: str) -> str:
+    """
+    Parse the "filename" value from a Content-Disposition header, and
+    return the default filename if the result is empty.
+    """
+    m = email.message.Message()
+    m["content-type"] = content_disposition
+    filename = m.get_param("filename")
+    if filename:
+        # We need to sanitize the filename to prevent directory traversal
+        # in case the filename contains ".." path parts.
+        filename = sanitize_content_filename(str(filename))
+    return filename or default_filename
+
+
+def _get_http_response_filename(resp: Response, link: Link) -> str:
+    """Get an ideal filename from the given HTTP response, falling back to
+    the link filename if not provided.
+    """
+    filename = link.filename  # fallback
+    # Have a look at the Content-Disposition header for a better guess
+    content_disposition = resp.headers.get("content-disposition")
+    if content_disposition:
+        filename = parse_content_disposition(content_disposition, filename)
+    ext: Optional[str] = splitext(filename)[1]
+    if not ext:
+        ext = mimetypes.guess_extension(resp.headers.get("content-type", ""))
+        if ext:
+            filename += ext
+    if not ext and link.url != resp.url:
+        ext = os.path.splitext(resp.url)[1]
+        if ext:
+            filename += ext
+    return filename
+
+
+def _http_get_download(session: PipSession, link: Link) -> Response:
+    target_url = link.url.split("#", 1)[0]
+    resp = session.get(target_url, headers=HEADERS, stream=True)
+    raise_for_status(resp)
+    return resp
+
+
+class Downloader:
+    def __init__(
+        self,
+        session: PipSession,
+        progress_bar: str,
+    ) -> None:
+        self._session = session
+        self._progress_bar = progress_bar
+
+    def __call__(self, link: Link, location: str) -> Tuple[str, str]:
+        """Download the file given by link into location."""
+        try:
+            resp = _http_get_download(self._session, link)
+        except NetworkConnectionError as e:
+            assert e.response is not None
+            logger.critical(
+                "HTTP error %s while getting %s", e.response.status_code, link
+            )
+            raise
+
+        filename = _get_http_response_filename(resp, link)
+        filepath = os.path.join(location, filename)
+
+        chunks = _prepare_download(resp, link, self._progress_bar)
+        with open(filepath, "wb") as content_file:
+            for chunk in chunks:
+                content_file.write(chunk)
+        content_type = resp.headers.get("Content-Type", "")
+        return filepath, content_type
+
+
+class BatchDownloader:
+    def __init__(
+        self,
+        session: PipSession,
+        progress_bar: str,
+    ) -> None:
+        self._session = session
+        self._progress_bar = progress_bar
+
+    def __call__(
+        self, links: Iterable[Link], location: str
+    ) -> Iterable[Tuple[Link, Tuple[str, str]]]:
+        """Download the files given by links into location."""
+        for link in links:
+            try:
+                resp = _http_get_download(self._session, link)
+            except NetworkConnectionError as e:
+                assert e.response is not None
+                logger.critical(
+                    "HTTP error %s while getting %s",
+                    e.response.status_code,
+                    link,
+                )
+                raise
+
+            filename = _get_http_response_filename(resp, link)
+            filepath = os.path.join(location, filename)
+
+            chunks = _prepare_download(resp, link, self._progress_bar)
+            with open(filepath, "wb") as content_file:
+                for chunk in chunks:
+                    content_file.write(chunk)
+            content_type = resp.headers.get("Content-Type", "")
+            yield link, (filepath, content_type)
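+
+
+if __name__ == "__main__":
+    # Editor's sketch, not part of pip: Content-Disposition handling above
+    # sanitizes path parts so a hostile header cannot escape the download
+    # directory. The header value below is only an example.
+    cd = 'attachment; filename="../../evil.whl"'
+    print(parse_content_disposition(cd, "fallback.whl"))  # -> evil.whl
+    print(sanitize_content_filename("dir/sub/pkg.whl"))   # -> pkg.whl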
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/network/lazy_wheel.py b/venv/lib/python3.9/site-packages/pip/_internal/network/lazy_wheel.py
new file mode 100644
index 0000000..854a6fa
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/network/lazy_wheel.py
@@ -0,0 +1,210 @@
+"""Lazy ZIP over HTTP"""
+
+__all__ = ["HTTPRangeRequestUnsupported", "dist_from_wheel_url"]
+
+from bisect import bisect_left, bisect_right
+from contextlib import contextmanager
+from tempfile import NamedTemporaryFile
+from typing import Any, Dict, Generator, List, Optional, Tuple
+from zipfile import BadZipfile, ZipFile
+
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
+
+from pip._internal.metadata import BaseDistribution, MemoryWheel, get_wheel_distribution
+from pip._internal.network.session import PipSession
+from pip._internal.network.utils import HEADERS, raise_for_status, response_chunks
+
+
+class HTTPRangeRequestUnsupported(Exception):
+    pass
+
+
+def dist_from_wheel_url(name: str, url: str, session: PipSession) -> BaseDistribution:
+    """Return a distribution object from the given wheel URL.
+
+    This uses HTTP range requests to only fetch the portion of the wheel
+    containing metadata, just enough for the object to be constructed.
+    If such requests are not supported, HTTPRangeRequestUnsupported
+    is raised.
+    """
+    with LazyZipOverHTTP(url, session) as zf:
+        # For read-only ZIP files, ZipFile only needs methods read,
+        # seek, seekable and tell, not the whole IO protocol.
+        wheel = MemoryWheel(zf.name, zf)  # type: ignore
+        # After the context manager exits, wheel.name intentionally
+        # refers to a file that no longer exists.
+        return get_wheel_distribution(wheel, canonicalize_name(name))
+
+
+class LazyZipOverHTTP:
+    """File-like object mapped to a ZIP file over HTTP.
+
+    This uses HTTP range requests to lazily fetch the file's content,
+    which is supposed to be fed to ZipFile.  If such requests are not
+    supported by the server, raise HTTPRangeRequestUnsupported
+    during initialization.
+    """
+
+    def __init__(
+        self, url: str, session: PipSession, chunk_size: int = CONTENT_CHUNK_SIZE
+    ) -> None:
+        head = session.head(url, headers=HEADERS)
+        raise_for_status(head)
+        assert head.status_code == 200
+        self._session, self._url, self._chunk_size = session, url, chunk_size
+        self._length = int(head.headers["Content-Length"])
+        self._file = NamedTemporaryFile()
+        self.truncate(self._length)
+        self._left: List[int] = []
+        self._right: List[int] = []
+        if "bytes" not in head.headers.get("Accept-Ranges", "none"):
+            raise HTTPRangeRequestUnsupported("range request is not supported")
+        self._check_zip()
+
+    @property
+    def mode(self) -> str:
+        """Opening mode, which is always rb."""
+        return "rb"
+
+    @property
+    def name(self) -> str:
+        """Path to the underlying file."""
+        return self._file.name
+
+    def seekable(self) -> bool:
+        """Return whether random access is supported, which is True."""
+        return True
+
+    def close(self) -> None:
+        """Close the file."""
+        self._file.close()
+
+    @property
+    def closed(self) -> bool:
+        """Whether the file is closed."""
+        return self._file.closed
+
+    def read(self, size: int = -1) -> bytes:
+        """Read up to size bytes from the object and return them.
+
+        As a convenience, if size is unspecified or -1,
+        all bytes until EOF are returned.  Fewer than
+        size bytes may be returned if EOF is reached.
+        """
+        download_size = max(size, self._chunk_size)
+        start, length = self.tell(), self._length
+        stop = length if size < 0 else min(start + download_size, length)
+        start = max(0, stop - download_size)
+        self._download(start, stop - 1)
+        return self._file.read(size)
+
+    def readable(self) -> bool:
+        """Return whether the file is readable, which is True."""
+        return True
+
+    def seek(self, offset: int, whence: int = 0) -> int:
+        """Change stream position and return the new absolute position.
+
+        Seek to offset, relative to the position indicated by whence:
+        * 0: Start of stream (the default); offset should be >= 0.
+        * 1: Current position; offset may be negative.
+        * 2: End of stream; offset is usually negative.
+        """
+        return self._file.seek(offset, whence)
+
+    def tell(self) -> int:
+        """Return the current position."""
+        return self._file.tell()
+
+    def truncate(self, size: Optional[int] = None) -> int:
+        """Resize the stream to the given size in bytes.
+
+        If size is unspecified resize to the current position.
+        The current stream position isn't changed.
+
+        Return the new file size.
+        """
+        return self._file.truncate(size)
+
+    def writable(self) -> bool:
+        """Return False."""
+        return False
+
+    def __enter__(self) -> "LazyZipOverHTTP":
+        self._file.__enter__()
+        return self
+
+    def __exit__(self, *exc: Any) -> None:
+        self._file.__exit__(*exc)
+
+    @contextmanager
+    def _stay(self) -> Generator[None, None, None]:
+        """Return a context manager keeping the position.
+
+        At the end of the block, seek back to original position.
+        """
+        pos = self.tell()
+        try:
+            yield
+        finally:
+            self.seek(pos)
+
+    def _check_zip(self) -> None:
+        """Check and download until the file is a valid ZIP."""
+        end = self._length - 1
+        for start in reversed(range(0, end, self._chunk_size)):
+            self._download(start, end)
+            with self._stay():
+                try:
+                    # For read-only ZIP files, ZipFile only needs
+                    # methods read, seek, seekable and tell.
+                    ZipFile(self)  # type: ignore
+                except BadZipfile:
+                    pass
+                else:
+                    break
+
+    def _stream_response(
+        self, start: int, end: int, base_headers: Dict[str, str] = HEADERS
+    ) -> Response:
+        """Return HTTP response to a range request from start to end."""
+        headers = base_headers.copy()
+        headers["Range"] = f"bytes={start}-{end}"
+        # TODO: Get range requests to be correctly cached
+        headers["Cache-Control"] = "no-cache"
+        return self._session.get(self._url, headers=headers, stream=True)
+
+    def _merge(
+        self, start: int, end: int, left: int, right: int
+    ) -> Generator[Tuple[int, int], None, None]:
+        """Return a generator of intervals to be fetched.
+
+        Args:
+            start (int): Start of needed interval
+            end (int): End of needed interval
+            left (int): Index of first overlapping downloaded data
+            right (int): Index after last overlapping downloaded data
+        """
+        lslice, rslice = self._left[left:right], self._right[left:right]
+        i = start = min([start] + lslice[:1])
+        end = max([end] + rslice[-1:])
+        for j, k in zip(lslice, rslice):
+            if j > i:
+                yield i, j - 1
+            i = k + 1
+        if i <= end:
+            yield i, end
+        self._left[left:right], self._right[left:right] = [start], [end]
+
+    def _download(self, start: int, end: int) -> None:
+        """Download bytes from start to end inclusively."""
+        with self._stay():
+            left = bisect_left(self._right, start)
+            right = bisect_right(self._left, end)
+            for start, end in self._merge(start, end, left, right):
+                response = self._stream_response(start, end)
+                response.raise_for_status()
+                self.seek(start)
+                for chunk in response_chunks(response, self._chunk_size):
+                    self._file.write(chunk)
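+
+
+if __name__ == "__main__":
+    # Editor's sketch, not part of pip: the pure interval logic behind
+    # _merge, shown standalone. __new__ skips __init__ here only so the
+    # demo avoids the HTTP handshake; with bytes 0-9 and 30-39 already
+    # fetched, a request for 5-34 should download only the 10-29 gap.
+    lazy = LazyZipOverHTTP.__new__(LazyZipOverHTTP)
+    lazy._left, lazy._right = [0, 30], [9, 39]
+    print(list(lazy._merge(5, 34, 0, 2)))  # -> [(10, 29)]
+    print(lazy._left, lazy._right)         # -> [0] [39] (intervals coalesced)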
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/network/session.py b/venv/lib/python3.9/site-packages/pip/_internal/network/session.py
new file mode 100644
index 0000000..e512ac7
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/network/session.py
@@ -0,0 +1,518 @@
+"""PipSession and supporting code, containing all pip-specific
+network request configuration and behavior.
+"""
+
+import email.utils
+import io
+import ipaddress
+import json
+import logging
+import mimetypes
+import os
+import platform
+import shutil
+import subprocess
+import sys
+import urllib.parse
+import warnings
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Dict,
+    Generator,
+    List,
+    Mapping,
+    Optional,
+    Sequence,
+    Tuple,
+    Union,
+)
+
+from pip._vendor import requests, urllib3
+from pip._vendor.cachecontrol import CacheControlAdapter as _BaseCacheControlAdapter
+from pip._vendor.requests.adapters import DEFAULT_POOLBLOCK, BaseAdapter
+from pip._vendor.requests.adapters import HTTPAdapter as _BaseHTTPAdapter
+from pip._vendor.requests.models import PreparedRequest, Response
+from pip._vendor.requests.structures import CaseInsensitiveDict
+from pip._vendor.urllib3.connectionpool import ConnectionPool
+from pip._vendor.urllib3.exceptions import InsecureRequestWarning
+
+from pip import __version__
+from pip._internal.metadata import get_default_environment
+from pip._internal.models.link import Link
+from pip._internal.network.auth import MultiDomainBasicAuth
+from pip._internal.network.cache import SafeFileCache
+
+# Import ssl from compat so the initial import occurs in only one place.
+from pip._internal.utils.compat import has_tls
+from pip._internal.utils.glibc import libc_ver
+from pip._internal.utils.misc import build_url_from_netloc, parse_netloc
+from pip._internal.utils.urls import url_to_path
+
+if TYPE_CHECKING:
+    from ssl import SSLContext
+
+    from pip._vendor.urllib3.poolmanager import PoolManager
+
+
+logger = logging.getLogger(__name__)
+
+SecureOrigin = Tuple[str, str, Optional[Union[int, str]]]
+
+
+# Ignore warning raised when using --trusted-host.
+warnings.filterwarnings("ignore", category=InsecureRequestWarning)
+
+
+SECURE_ORIGINS: List[SecureOrigin] = [
+    # protocol, hostname, port
+    # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC)
+    ("https", "*", "*"),
+    ("*", "localhost", "*"),
+    ("*", "127.0.0.0/8", "*"),
+    ("*", "::1/128", "*"),
+    ("file", "*", None),
+    # ssh is always secure.
+    ("ssh", "*", "*"),
+]
+
+
+# These are environment variables present when running under various
+# CI systems.  For each variable, some CI systems that use the variable
+# are indicated.  The collection was chosen so that for each of a number
+# of popular systems, at least one of the environment variables is used.
+# This list is used to provide some indication of and lower bound for
+# CI traffic to PyPI.  Thus, it is okay if the list is not comprehensive.
+# For more background, see: https://github.com/pypa/pip/issues/5499
+CI_ENVIRONMENT_VARIABLES = (
+    # Azure Pipelines
+    "BUILD_BUILDID",
+    # Jenkins
+    "BUILD_ID",
+    # AppVeyor, CircleCI, Codeship, Gitlab CI, Shippable, Travis CI
+    "CI",
+    # Explicit environment variable.
+    "PIP_IS_CI",
+)
+
+
+def looks_like_ci() -> bool:
+    """
+    Return whether it looks like pip is running under CI.
+    """
+    # We don't use the method of checking for a tty (e.g. using isatty())
+    # because some CI systems mimic a tty (e.g. Travis CI).  Thus that
+    # method doesn't provide definitive information in either direction.
+    return any(name in os.environ for name in CI_ENVIRONMENT_VARIABLES)
+
+
+def user_agent() -> str:
+    """
+    Return a string representing the user agent.
+    """
+    data: Dict[str, Any] = {
+        "installer": {"name": "pip", "version": __version__},
+        "python": platform.python_version(),
+        "implementation": {
+            "name": platform.python_implementation(),
+        },
+    }
+
+    if data["implementation"]["name"] == "CPython":
+        data["implementation"]["version"] = platform.python_version()
+    elif data["implementation"]["name"] == "PyPy":
+        pypy_version_info = sys.pypy_version_info  # type: ignore
+        if pypy_version_info.releaselevel == "final":
+            pypy_version_info = pypy_version_info[:3]
+        data["implementation"]["version"] = ".".join(
+            [str(x) for x in pypy_version_info]
+        )
+    elif data["implementation"]["name"] == "Jython":
+        # Complete Guess
+        data["implementation"]["version"] = platform.python_version()
+    elif data["implementation"]["name"] == "IronPython":
+        # Complete Guess
+        data["implementation"]["version"] = platform.python_version()
+
+    if sys.platform.startswith("linux"):
+        from pip._vendor import distro
+
+        linux_distribution = distro.name(), distro.version(), distro.codename()
+        distro_infos: Dict[str, Any] = dict(
+            filter(
+                lambda x: x[1],
+                zip(["name", "version", "id"], linux_distribution),
+            )
+        )
+        libc = dict(
+            filter(
+                lambda x: x[1],
+                zip(["lib", "version"], libc_ver()),
+            )
+        )
+        if libc:
+            distro_infos["libc"] = libc
+        if distro_infos:
+            data["distro"] = distro_infos
+
+    if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
+        data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]}
+
+    if platform.system():
+        data.setdefault("system", {})["name"] = platform.system()
+
+    if platform.release():
+        data.setdefault("system", {})["release"] = platform.release()
+
+    if platform.machine():
+        data["cpu"] = platform.machine()
+
+    if has_tls():
+        import _ssl as ssl
+
+        data["openssl_version"] = ssl.OPENSSL_VERSION
+
+    setuptools_dist = get_default_environment().get_distribution("setuptools")
+    if setuptools_dist is not None:
+        data["setuptools_version"] = str(setuptools_dist.version)
+
+    if shutil.which("rustc") is not None:
+        # If for any reason `rustc --version` fails, silently ignore it
+        try:
+            rustc_output = subprocess.check_output(
+                ["rustc", "--version"], stderr=subprocess.STDOUT, timeout=0.5
+            )
+        except Exception:
+            pass
+        else:
+            if rustc_output.startswith(b"rustc "):
+                # The format of `rustc --version` is:
+                # `b'rustc 1.52.1 (9bc8c42bb 2021-05-09)\n'`
+                # We extract just the middle (1.52.1) part
+                data["rustc_version"] = rustc_output.split(b" ")[1].decode()
+
+    # Use None rather than False so as not to give the impression that
+    # pip knows it is not being run under CI.  Rather, it is a null or
+    # inconclusive result.  Also, we include some value rather than no
+    # value to make it easier to know that the check has been run.
+    data["ci"] = True if looks_like_ci() else None
+
+    user_data = os.environ.get("PIP_USER_AGENT_USER_DATA")
+    if user_data is not None:
+        data["user_data"] = user_data
+
+    return "{data[installer][name]}/{data[installer][version]} {json}".format(
+        data=data,
+        json=json.dumps(data, separators=(",", ":"), sort_keys=True),
+    )
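+
+# Editor's note, not part of pip: the resulting header looks roughly like
+# "pip/22.0.4 {...sorted JSON with implementation, distro, system, cpu...}",
+# i.e. the installer name/version followed by the JSON payload built above.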
+
+
+class LocalFSAdapter(BaseAdapter):
+    def send(
+        self,
+        request: PreparedRequest,
+        stream: bool = False,
+        timeout: Optional[Union[float, Tuple[float, float]]] = None,
+        verify: Union[bool, str] = True,
+        cert: Optional[Union[str, Tuple[str, str]]] = None,
+        proxies: Optional[Mapping[str, str]] = None,
+    ) -> Response:
+        pathname = url_to_path(request.url)
+
+        resp = Response()
+        resp.status_code = 200
+        resp.url = request.url
+
+        try:
+            stats = os.stat(pathname)
+        except OSError as exc:
+            # Format the raised exception as an io.BytesIO object
+            # to return a better error message:
+            resp.status_code = 404
+            resp.reason = type(exc).__name__
+            resp.raw = io.BytesIO(f"{resp.reason}: {exc}".encode("utf8"))
+        else:
+            modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
+            content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
+            resp.headers = CaseInsensitiveDict(
+                {
+                    "Content-Type": content_type,
+                    "Content-Length": stats.st_size,
+                    "Last-Modified": modified,
+                }
+            )
+
+            resp.raw = open(pathname, "rb")
+            resp.close = resp.raw.close
+
+        return resp
+
+    def close(self) -> None:
+        pass
+
+
+class _SSLContextAdapterMixin:
+    """Mixin to add the ``ssl_context`` constructor argument to HTTP adapters.
+
+    The additional argument is forwarded directly to the pool manager. This allows us
+    to dynamically decide what SSL store to use at runtime, which is used to implement
+    the optional ``truststore`` backend.
+    """
+
+    def __init__(
+        self,
+        *,
+        ssl_context: Optional["SSLContext"] = None,
+        **kwargs: Any,
+    ) -> None:
+        self._ssl_context = ssl_context
+        super().__init__(**kwargs)
+
+    def init_poolmanager(
+        self,
+        connections: int,
+        maxsize: int,
+        block: bool = DEFAULT_POOLBLOCK,
+        **pool_kwargs: Any,
+    ) -> "PoolManager":
+        if self._ssl_context is not None:
+            pool_kwargs.setdefault("ssl_context", self._ssl_context)
+        return super().init_poolmanager(  # type: ignore[misc]
+            connections=connections,
+            maxsize=maxsize,
+            block=block,
+            **pool_kwargs,
+        )
+
+
+class HTTPAdapter(_SSLContextAdapterMixin, _BaseHTTPAdapter):
+    pass
+
+
+class CacheControlAdapter(_SSLContextAdapterMixin, _BaseCacheControlAdapter):
+    pass
+
+
+class InsecureHTTPAdapter(HTTPAdapter):
+    def cert_verify(
+        self,
+        conn: ConnectionPool,
+        url: str,
+        verify: Union[bool, str],
+        cert: Optional[Union[str, Tuple[str, str]]],
+    ) -> None:
+        super().cert_verify(conn=conn, url=url, verify=False, cert=cert)
+
+
+class InsecureCacheControlAdapter(CacheControlAdapter):
+    def cert_verify(
+        self,
+        conn: ConnectionPool,
+        url: str,
+        verify: Union[bool, str],
+        cert: Optional[Union[str, Tuple[str, str]]],
+    ) -> None:
+        super().cert_verify(conn=conn, url=url, verify=False, cert=cert)
+
+
+class PipSession(requests.Session):
+
+    timeout: Optional[int] = None
+
+    def __init__(
+        self,
+        *args: Any,
+        retries: int = 0,
+        cache: Optional[str] = None,
+        trusted_hosts: Sequence[str] = (),
+        index_urls: Optional[List[str]] = None,
+        ssl_context: Optional["SSLContext"] = None,
+        **kwargs: Any,
+    ) -> None:
+        """
+        :param trusted_hosts: Domains not to emit warnings for when not using
+            HTTPS.
+        """
+        super().__init__(*args, **kwargs)
+
+        # Namespace the attribute with "pip_" just in case to prevent
+        # possible conflicts with the base class.
+        self.pip_trusted_origins: List[Tuple[str, Optional[int]]] = []
+
+        # Attach our User Agent to the request
+        self.headers["User-Agent"] = user_agent()
+
+        # Attach our Authentication handler to the session
+        self.auth = MultiDomainBasicAuth(index_urls=index_urls)
+
+        # Create our urllib3.Retry instance which will allow us to customize
+        # how we handle retries.
+        retries = urllib3.Retry(
+            # Set the total number of retries that a particular request can
+            # have.
+            total=retries,
+            # A 503 error from PyPI typically means that the Fastly -> Origin
+            # connection got interrupted in some way. A 503 error in general
+            # is typically considered a transient error so we'll go ahead and
+            # retry it.
+            # A 500 may indicate a transient error in Amazon S3.
+            # A 520 or 527 may indicate a transient error in CloudFlare.
+            status_forcelist=[500, 503, 520, 527],
+            # Add a small amount of back off between failed requests in
+            # order to prevent hammering the service.
+            backoff_factor=0.25,
+        )  # type: ignore
+
+        # Our Insecure HTTPAdapter disables HTTPS validation. It does not
+        # support caching so we'll use it for all http:// URLs.
+        # If caching is disabled, we will also use it for
+        # https:// hosts that we've marked as ignoring
+        # TLS errors for (trusted-hosts).
+        insecure_adapter = InsecureHTTPAdapter(max_retries=retries)
+
+        # We want to _only_ cache responses on securely fetched origins or when
+        # the host is specified as trusted. We do this because
+        # we can't validate the response of an insecurely/untrusted fetched
+        # origin, and we don't want someone to be able to poison the cache and
+        # require manual eviction from the cache to fix it.
+        if cache:
+            secure_adapter = CacheControlAdapter(
+                cache=SafeFileCache(cache),
+                max_retries=retries,
+                ssl_context=ssl_context,
+            )
+            self._trusted_host_adapter = InsecureCacheControlAdapter(
+                cache=SafeFileCache(cache),
+                max_retries=retries,
+            )
+        else:
+            secure_adapter = HTTPAdapter(max_retries=retries, ssl_context=ssl_context)
+            self._trusted_host_adapter = insecure_adapter
+
+        self.mount("https://", secure_adapter)
+        self.mount("http://", insecure_adapter)
+
+        # Enable file:// urls
+        self.mount("file://", LocalFSAdapter())
+
+        for host in trusted_hosts:
+            self.add_trusted_host(host, suppress_logging=True)
+
+    def update_index_urls(self, new_index_urls: List[str]) -> None:
+        """
+        :param new_index_urls: New index urls to update the authentication
+            handler with.
+        """
+        self.auth.index_urls = new_index_urls
+
+    def add_trusted_host(
+        self, host: str, source: Optional[str] = None, suppress_logging: bool = False
+    ) -> None:
+        """
+        :param host: It is okay to provide a host that has previously been
+            added.
+        :param source: An optional source string, for logging where the host
+            string came from.
+        """
+        if not suppress_logging:
+            msg = f"adding trusted host: {host!r}"
+            if source is not None:
+                msg += f" (from {source})"
+            logger.info(msg)
+
+        host_port = parse_netloc(host)
+        if host_port not in self.pip_trusted_origins:
+            self.pip_trusted_origins.append(host_port)
+
+        self.mount(
+            build_url_from_netloc(host, scheme="http") + "/", self._trusted_host_adapter
+        )
+        self.mount(build_url_from_netloc(host) + "/", self._trusted_host_adapter)
+        if not host_port[1]:
+            self.mount(
+                build_url_from_netloc(host, scheme="http") + ":",
+                self._trusted_host_adapter,
+            )
+            # Mount wildcard ports for the same host.
+            self.mount(build_url_from_netloc(host) + ":", self._trusted_host_adapter)
+
+    def iter_secure_origins(self) -> Generator[SecureOrigin, None, None]:
+        yield from SECURE_ORIGINS
+        for host, port in self.pip_trusted_origins:
+            yield ("*", host, "*" if port is None else port)
+
+    def is_secure_origin(self, location: Link) -> bool:
+        # Determine if this url used a secure transport mechanism
+        parsed = urllib.parse.urlparse(str(location))
+        origin_protocol, origin_host, origin_port = (
+            parsed.scheme,
+            parsed.hostname,
+            parsed.port,
+        )
+
+        # Determine the protocol to match against. Don't count the
+        # repository type as part of the protocol: in cases such as
+        # "git+ssh", only use "ssh" (i.e., only verify against the last
+        # scheme).
+        origin_protocol = origin_protocol.rsplit("+", 1)[-1]
+
+        # Determine if our origin is a secure origin by looking through our
+        # hardcoded list of secure origins, as well as any additional ones
+        # configured on this PipSession instance.
+        for secure_origin in self.iter_secure_origins():
+            secure_protocol, secure_host, secure_port = secure_origin
+            if origin_protocol != secure_protocol and secure_protocol != "*":
+                continue
+
+            try:
+                addr = ipaddress.ip_address(origin_host or "")
+                network = ipaddress.ip_network(secure_host)
+            except ValueError:
+                # We don't have both a valid address and a valid network, so
+                # we'll check this origin against hostnames.
+                if (
+                    origin_host
+                    and origin_host.lower() != secure_host.lower()
+                    and secure_host != "*"
+                ):
+                    continue
+            else:
+                # We have a valid address and network, so see if the address
+                # is contained within the network.
+                if addr not in network:
+                    continue
+
+            # Check to see if the port matches.
+            if (
+                origin_port != secure_port
+                and secure_port != "*"
+                and secure_port is not None
+            ):
+                continue
+
+            # If we've gotten here, then this origin matches the current
+            # secure origin and we should return True
+            return True
+
+        # If we've gotten to this point, then the origin isn't secure and we
+        # will not accept it as a valid location to search. We will however
+        # log a warning that we are ignoring it.
+        logger.warning(
+            "The repository located at %s is not a trusted or secure host and "
+            "is being ignored. If this repository is available via HTTPS we "
+            "recommend you use HTTPS instead, otherwise you may silence "
+            "this warning and allow it anyway with '--trusted-host %s'.",
+            origin_host,
+            origin_host,
+        )
+
+        return False
+
+    def request(self, method: str, url: str, *args: Any, **kwargs: Any) -> Response:
+        # Allow setting a default timeout on a session
+        kwargs.setdefault("timeout", self.timeout)
+        # Allow setting default proxies on a session
+        kwargs.setdefault("proxies", self.proxies)
+
+        # Dispatch the actual request
+        return super().request(method, url, *args, **kwargs)
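
For orientation, a minimal sketch of how this session class is meant to be used, assuming the vendored import paths above; the index URL and trusted host are placeholders:

```python
from pip._internal.models.link import Link
from pip._internal.network.session import PipSession

# Placeholder index URL and trusted host, purely for illustration.
session = PipSession(
    retries=3,
    trusted_hosts=["internal.example.com"],
    index_urls=["https://pypi.org/simple"],
)
session.timeout = 15  # request() applies this default to every call

# https:// origins are always secure; plain http:// is only accepted for
# localhost-style origins or hosts added via add_trusted_host().
assert session.is_secure_origin(Link("https://pypi.org/simple"))
assert session.is_secure_origin(Link("http://internal.example.com/simple"))
```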
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/network/utils.py b/venv/lib/python3.9/site-packages/pip/_internal/network/utils.py
new file mode 100644
index 0000000..134848a
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/network/utils.py
@@ -0,0 +1,96 @@
+from typing import Dict, Generator
+
+from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
+
+from pip._internal.exceptions import NetworkConnectionError
+
+# The following comments and HTTP headers were originally added by
+# Donald Stufft in git commit 22c562429a61bb77172039e480873fb239dd8c03.
+#
+# We use Accept-Encoding: identity here because requests defaults to
+# accepting compressed responses. This breaks in a variety of ways
+# depending on how the server is configured.
+# - Some servers will notice that the file isn't a compressible file
+#   and will leave the file alone and with an empty Content-Encoding
+# - Some servers will notice that the file is already compressed and
+#   will leave the file alone, adding a Content-Encoding: gzip header
+# - Some servers won't notice anything at all and will take a file
+#   that's already been compressed and compress it again, and set
+#   the Content-Encoding: gzip header
+# By setting this to request only the identity encoding we're hoping
+# to eliminate the third case.  Hopefully there does not exist a server
+# which when given a file will notice it is already compressed and that
+# you're not asking for a compressed file and will then decompress it
+# before sending because if that's the case I don't think it'll ever be
+# possible to make this work.
+HEADERS: Dict[str, str] = {"Accept-Encoding": "identity"}
+
+
+def raise_for_status(resp: Response) -> None:
+    http_error_msg = ""
+    if isinstance(resp.reason, bytes):
+        # We attempt to decode utf-8 first because some servers
+        # choose to localize their reason strings. If the string
+        # isn't utf-8, we fall back to iso-8859-1 for all other
+        # encodings.
+        try:
+            reason = resp.reason.decode("utf-8")
+        except UnicodeDecodeError:
+            reason = resp.reason.decode("iso-8859-1")
+    else:
+        reason = resp.reason
+
+    if 400 <= resp.status_code < 500:
+        http_error_msg = (
+            f"{resp.status_code} Client Error: {reason} for url: {resp.url}"
+        )
+
+    elif 500 <= resp.status_code < 600:
+        http_error_msg = (
+            f"{resp.status_code} Server Error: {reason} for url: {resp.url}"
+        )
+
+    if http_error_msg:
+        raise NetworkConnectionError(http_error_msg, response=resp)
+
+
+def response_chunks(
+    response: Response, chunk_size: int = CONTENT_CHUNK_SIZE
+) -> Generator[bytes, None, None]:
+    """Given a requests Response, provide the data chunks."""
+    try:
+        # Special case for urllib3.
+        for chunk in response.raw.stream(
+            chunk_size,
+            # We use decode_content=False here because we don't
+            # want urllib3 to mess with the raw bytes we get
+            # from the server. If we decompress inside of
+            # urllib3 then we cannot verify the checksum
+            # because the checksum will be of the compressed
+            # file. This breakage will only occur if the
+            # server adds a Content-Encoding header, which
+            # depends on how the server was configured:
+            # - Some servers will notice that the file isn't a
+            #   compressible file and will leave the file alone
+            #   and with an empty Content-Encoding
+            # - Some servers will notice that the file is
+            #   already compressed and will leave the file
+            #   alone and will add a Content-Encoding: gzip
+            #   header
+            # - Some servers won't notice anything at all and
+            #   will take a file that's already been compressed
+            #   and compress it again and set the
+            #   Content-Encoding: gzip header
+            #
+            # By setting this not to decode automatically we
+            # hope to eliminate problems with the second case.
+            decode_content=False,
+        ):
+            yield chunk
+    except AttributeError:
+        # Standard file-like object.
+        while True:
+            chunk = response.raw.read(chunk_size)
+            if not chunk:
+                break
+            yield chunk
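
For context, a sketch of how these helpers combine to stream and hash a download; the URL is a placeholder and the import paths assume the vendored layout above:

```python
import hashlib

from pip._internal.network.session import PipSession
from pip._internal.network.utils import HEADERS, raise_for_status, response_chunks

session = PipSession()
resp = session.get(
    "https://files.example.com/pkg-1.0-py3-none-any.whl",  # placeholder URL
    headers=HEADERS,  # Accept-Encoding: identity, per the comment above
    stream=True,
)
raise_for_status(resp)  # NetworkConnectionError on 4xx/5xx

digest = hashlib.sha256()
for chunk in response_chunks(resp):  # raw bytes, never transparently decoded
    digest.update(chunk)
print(digest.hexdigest())
```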
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/network/xmlrpc.py b/venv/lib/python3.9/site-packages/pip/_internal/network/xmlrpc.py
new file mode 100644
index 0000000..4a7d55d
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/network/xmlrpc.py
@@ -0,0 +1,60 @@
+"""xmlrpclib.Transport implementation
+"""
+
+import logging
+import urllib.parse
+import xmlrpc.client
+from typing import TYPE_CHECKING, Tuple
+
+from pip._internal.exceptions import NetworkConnectionError
+from pip._internal.network.session import PipSession
+from pip._internal.network.utils import raise_for_status
+
+if TYPE_CHECKING:
+    from xmlrpc.client import _HostType, _Marshallable
+
+logger = logging.getLogger(__name__)
+
+
+class PipXmlrpcTransport(xmlrpc.client.Transport):
+    """Provide an `xmlrpclib.Transport` implementation via a `PipSession`
+    object.
+    """
+
+    def __init__(
+        self, index_url: str, session: PipSession, use_datetime: bool = False
+    ) -> None:
+        super().__init__(use_datetime)
+        index_parts = urllib.parse.urlparse(index_url)
+        self._scheme = index_parts.scheme
+        self._session = session
+
+    def request(
+        self,
+        host: "_HostType",
+        handler: str,
+        request_body: bytes,
+        verbose: bool = False,
+    ) -> Tuple["_Marshallable", ...]:
+        assert isinstance(host, str)
+        parts = (self._scheme, host, handler, None, None, None)
+        url = urllib.parse.urlunparse(parts)
+        try:
+            headers = {"Content-Type": "text/xml"}
+            response = self._session.post(
+                url,
+                data=request_body,
+                headers=headers,
+                stream=True,
+            )
+            raise_for_status(response)
+            self.verbose = verbose
+            return self.parse_response(response.raw)
+        except NetworkConnectionError as exc:
+            assert exc.response
+            logger.critical(
+                "HTTP error %s while getting %s",
+                exc.response.status_code,
+                url,
+            )
+            raise
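
Usage sketch: the transport plugs into the standard library's ServerProxy. PyPI's XML-RPC API is largely retired, so the endpoint and the commented-out method call are illustrative only:

```python
import xmlrpc.client

from pip._internal.network.session import PipSession
from pip._internal.network.xmlrpc import PipXmlrpcTransport

index_url = "https://pypi.org/pypi"  # illustrative XML-RPC endpoint
session = PipSession()
transport = PipXmlrpcTransport(index_url, session)
pypi = xmlrpc.client.ServerProxy(index_url, transport)
# hits = pypi.search({"name": "example"})  # method availability varies
```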
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/operations/__init__.py b/venv/lib/python3.9/site-packages/pip/_internal/operations/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/operations/__init__.py
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..a5f6466
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/operations/__pycache__/check.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/operations/__pycache__/check.cpython-39.pyc
new file mode 100644
index 0000000..76e24f0
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/operations/__pycache__/check.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-39.pyc
new file mode 100644
index 0000000..f61e7d7
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-39.pyc
new file mode 100644
index 0000000..538fba6
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/operations/build/__init__.py b/venv/lib/python3.9/site-packages/pip/_internal/operations/build/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/operations/build/__init__.py
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..22f5df5
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/build_tracker.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/build_tracker.cpython-39.pyc
new file mode 100644
index 0000000..2503433
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/build_tracker.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-39.pyc
new file mode 100644
index 0000000..c927f4a
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-39.pyc
new file mode 100644
index 0000000..22dbe00
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-39.pyc
new file mode 100644
index 0000000..75f838b
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-39.pyc
new file mode 100644
index 0000000..42bb7c1
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-39.pyc
new file mode 100644
index 0000000..ab87b51
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-39.pyc
new file mode 100644
index 0000000..066ca38
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/operations/build/build_tracker.py b/venv/lib/python3.9/site-packages/pip/_internal/operations/build/build_tracker.py
new file mode 100644
index 0000000..6621549
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/operations/build/build_tracker.py
@@ -0,0 +1,124 @@
+import contextlib
+import hashlib
+import logging
+import os
+from types import TracebackType
+from typing import Dict, Generator, Optional, Set, Type, Union
+
+from pip._internal.models.link import Link
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.utils.temp_dir import TempDirectory
+
+logger = logging.getLogger(__name__)
+
+
+@contextlib.contextmanager
+def update_env_context_manager(**changes: str) -> Generator[None, None, None]:
+    target = os.environ
+
+    # Save values from the target and change them.
+    non_existent_marker = object()
+    saved_values: Dict[str, Union[object, str]] = {}
+    for name, new_value in changes.items():
+        try:
+            saved_values[name] = target[name]
+        except KeyError:
+            saved_values[name] = non_existent_marker
+        target[name] = new_value
+
+    try:
+        yield
+    finally:
+        # Restore original values in the target.
+        for name, original_value in saved_values.items():
+            if original_value is non_existent_marker:
+                del target[name]
+            else:
+                assert isinstance(original_value, str)  # for mypy
+                target[name] = original_value
+
+
+@contextlib.contextmanager
+def get_build_tracker() -> Generator["BuildTracker", None, None]:
+    root = os.environ.get("PIP_BUILD_TRACKER")
+    with contextlib.ExitStack() as ctx:
+        if root is None:
+            root = ctx.enter_context(TempDirectory(kind="build-tracker")).path
+            ctx.enter_context(update_env_context_manager(PIP_BUILD_TRACKER=root))
+            logger.debug("Initialized build tracking at %s", root)
+
+        with BuildTracker(root) as tracker:
+            yield tracker
+
+
+class BuildTracker:
+    def __init__(self, root: str) -> None:
+        self._root = root
+        self._entries: Set[InstallRequirement] = set()
+        logger.debug("Created build tracker: %s", self._root)
+
+    def __enter__(self) -> "BuildTracker":
+        logger.debug("Entered build tracker: %s", self._root)
+        return self
+
+    def __exit__(
+        self,
+        exc_type: Optional[Type[BaseException]],
+        exc_val: Optional[BaseException],
+        exc_tb: Optional[TracebackType],
+    ) -> None:
+        self.cleanup()
+
+    def _entry_path(self, link: Link) -> str:
+        hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest()
+        return os.path.join(self._root, hashed)
+
+    def add(self, req: InstallRequirement) -> None:
+        """Add an InstallRequirement to build tracking."""
+
+        assert req.link
+        # Get the file to write information about this requirement.
+        entry_path = self._entry_path(req.link)
+
+        # Try reading from the file. If it exists and can be read from, a build
+        # is already in progress, so a LookupError is raised.
+        try:
+            with open(entry_path) as fp:
+                contents = fp.read()
+        except FileNotFoundError:
+            pass
+        else:
+            message = "{} is already being built: {}".format(req.link, contents)
+            raise LookupError(message)
+
+        # If we're here, req should really not be building already.
+        assert req not in self._entries
+
+        # Start tracking this requirement.
+        with open(entry_path, "w", encoding="utf-8") as fp:
+            fp.write(str(req))
+        self._entries.add(req)
+
+        logger.debug("Added %s to build tracker %r", req, self._root)
+
+    def remove(self, req: InstallRequirement) -> None:
+        """Remove an InstallRequirement from build tracking."""
+
+        assert req.link
+        # Delete the created file and the corresponding entries.
+        os.unlink(self._entry_path(req.link))
+        self._entries.remove(req)
+
+        logger.debug("Removed %s from build tracker %r", req, self._root)
+
+    def cleanup(self) -> None:
+        for req in set(self._entries):
+            self.remove(req)
+
+        logger.debug("Removed build tracker: %r", self._root)
+
+    @contextlib.contextmanager
+    def track(self, req: InstallRequirement) -> Generator[None, None, None]:
+        self.add(req)
+        yield
+        self.remove(req)
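
A minimal sketch of the two context managers above; exporting PIP_BUILD_TRACKER is what lets child pip processes share one tracker root:

```python
import os

from pip._internal.operations.build.build_tracker import (
    get_build_tracker,
    update_env_context_manager,
)

# The variable is restored (or removed) on exit, even if the body raises.
with update_env_context_manager(MY_FLAG="1"):
    assert os.environ["MY_FLAG"] == "1"

# Creates a temp root on first use and exports PIP_BUILD_TRACKER so that
# sub-processes building dependencies reuse the same tracker.
with get_build_tracker() as tracker:
    print(os.environ["PIP_BUILD_TRACKER"], tracker)
```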
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/operations/build/metadata.py b/venv/lib/python3.9/site-packages/pip/_internal/operations/build/metadata.py
new file mode 100644
index 0000000..e2b7b44
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/operations/build/metadata.py
@@ -0,0 +1,39 @@
+"""Metadata generation logic for source distributions.
+"""
+
+import os
+
+from pip._vendor.pep517.wrappers import Pep517HookCaller
+
+from pip._internal.build_env import BuildEnvironment
+from pip._internal.exceptions import (
+    InstallationSubprocessError,
+    MetadataGenerationFailed,
+)
+from pip._internal.utils.subprocess import runner_with_spinner_message
+from pip._internal.utils.temp_dir import TempDirectory
+
+
+def generate_metadata(
+    build_env: BuildEnvironment, backend: Pep517HookCaller, details: str
+) -> str:
+    """Generate metadata using mechanisms described in PEP 517.
+
+    Returns the generated metadata directory.
+    """
+    metadata_tmpdir = TempDirectory(kind="modern-metadata", globally_managed=True)
+
+    metadata_dir = metadata_tmpdir.path
+
+    with build_env:
+        # Note that Pep517HookCaller implements a fallback for
+        # prepare_metadata_for_build_wheel, so we don't have to
+        # consider the possibility that this hook doesn't exist.
+        runner = runner_with_spinner_message("Preparing metadata (pyproject.toml)")
+        with backend.subprocess_runner(runner):
+            try:
+                distinfo_dir = backend.prepare_metadata_for_build_wheel(metadata_dir)
+            except InstallationSubprocessError as error:
+                raise MetadataGenerationFailed(package_details=details) from error
+
+    return os.path.join(metadata_dir, distinfo_dir)
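
A hedged sketch of driving this function directly. NoOpBuildEnvironment and the project path are assumptions; in pip proper the caller supplies a populated BuildEnvironment and a backend resolved from pyproject.toml:

```python
from pip._vendor.pep517.wrappers import Pep517HookCaller

from pip._internal.build_env import NoOpBuildEnvironment
from pip._internal.operations.build.metadata import generate_metadata

source_dir = "/path/to/project"  # assumption: contains a pyproject.toml
backend = Pep517HookCaller(source_dir, "setuptools.build_meta")

dist_info = generate_metadata(
    build_env=NoOpBuildEnvironment(),  # assumes build deps are importable
    backend=backend,
    details=f"example-project from {source_dir}",
)
print(dist_info)  # path to the generated *.dist-info directory
```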
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/operations/build/metadata_editable.py b/venv/lib/python3.9/site-packages/pip/_internal/operations/build/metadata_editable.py
new file mode 100644
index 0000000..4c3f48b
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/operations/build/metadata_editable.py
@@ -0,0 +1,41 @@
+"""Metadata generation logic for editable installs (PEP 660).
+"""
+
+import os
+
+from pip._vendor.pep517.wrappers import Pep517HookCaller
+
+from pip._internal.build_env import BuildEnvironment
+from pip._internal.exceptions import (
+    InstallationSubprocessError,
+    MetadataGenerationFailed,
+)
+from pip._internal.utils.subprocess import runner_with_spinner_message
+from pip._internal.utils.temp_dir import TempDirectory
+
+
+def generate_editable_metadata(
+    build_env: BuildEnvironment, backend: Pep517HookCaller, details: str
+) -> str:
+    """Generate metadata using mechanisms described in PEP 660.
+
+    Returns the generated metadata directory.
+    """
+    metadata_tmpdir = TempDirectory(kind="modern-metadata", globally_managed=True)
+
+    metadata_dir = metadata_tmpdir.path
+
+    with build_env:
+        # Note that Pep517HookCaller implements a fallback for
+        # prepare_metadata_for_build_wheel/editable, so we don't have to
+        # consider the possibility that this hook doesn't exist.
+        runner = runner_with_spinner_message(
+            "Preparing editable metadata (pyproject.toml)"
+        )
+        with backend.subprocess_runner(runner):
+            try:
+                distinfo_dir = backend.prepare_metadata_for_build_editable(metadata_dir)
+            except InstallationSubprocessError as error:
+                raise MetadataGenerationFailed(package_details=details) from error
+
+    return os.path.join(metadata_dir, distinfo_dir)
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/operations/build/metadata_legacy.py b/venv/lib/python3.9/site-packages/pip/_internal/operations/build/metadata_legacy.py
new file mode 100644
index 0000000..e60988d
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/operations/build/metadata_legacy.py
@@ -0,0 +1,74 @@
+"""Metadata generation logic for legacy source distributions.
+"""
+
+import logging
+import os
+
+from pip._internal.build_env import BuildEnvironment
+from pip._internal.cli.spinners import open_spinner
+from pip._internal.exceptions import (
+    InstallationError,
+    InstallationSubprocessError,
+    MetadataGenerationFailed,
+)
+from pip._internal.utils.setuptools_build import make_setuptools_egg_info_args
+from pip._internal.utils.subprocess import call_subprocess
+from pip._internal.utils.temp_dir import TempDirectory
+
+logger = logging.getLogger(__name__)
+
+
+def _find_egg_info(directory: str) -> str:
+    """Find an .egg-info subdirectory in `directory`."""
+    filenames = [f for f in os.listdir(directory) if f.endswith(".egg-info")]
+
+    if not filenames:
+        raise InstallationError(f"No .egg-info directory found in {directory}")
+
+    if len(filenames) > 1:
+        raise InstallationError(
+            "More than one .egg-info directory found in {}".format(directory)
+        )
+
+    return os.path.join(directory, filenames[0])
+
+
+def generate_metadata(
+    build_env: BuildEnvironment,
+    setup_py_path: str,
+    source_dir: str,
+    isolated: bool,
+    details: str,
+) -> str:
+    """Generate metadata using setup.py-based de facto mechanisms.
+
+    Returns the generated metadata directory.
+    """
+    logger.debug(
+        "Running setup.py (path:%s) egg_info for package %s",
+        setup_py_path,
+        details,
+    )
+
+    egg_info_dir = TempDirectory(kind="pip-egg-info", globally_managed=True).path
+
+    args = make_setuptools_egg_info_args(
+        setup_py_path,
+        egg_info_dir=egg_info_dir,
+        no_user_config=isolated,
+    )
+
+    with build_env:
+        with open_spinner("Preparing metadata (setup.py)") as spinner:
+            try:
+                call_subprocess(
+                    args,
+                    cwd=source_dir,
+                    command_desc="python setup.py egg_info",
+                    spinner=spinner,
+                )
+            except InstallationSubprocessError as error:
+                raise MetadataGenerationFailed(package_details=details) from error
+
+    # Return the .egg-info directory.
+    return _find_egg_info(egg_info_dir)
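
The _find_egg_info helper can be exercised in isolation; a throwaway sketch using a temporary directory:

```python
import os
import tempfile

from pip._internal.operations.build.metadata_legacy import _find_egg_info

tmp = tempfile.mkdtemp()
os.mkdir(os.path.join(tmp, "example.egg-info"))
print(_find_egg_info(tmp))  # .../example.egg-info
# Zero or more than one .egg-info entry raises InstallationError instead.
```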
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/operations/build/wheel.py b/venv/lib/python3.9/site-packages/pip/_internal/operations/build/wheel.py
new file mode 100644
index 0000000..b0d2fc9
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/operations/build/wheel.py
@@ -0,0 +1,37 @@
+import logging
+import os
+from typing import Optional
+
+from pip._vendor.pep517.wrappers import Pep517HookCaller
+
+from pip._internal.utils.subprocess import runner_with_spinner_message
+
+logger = logging.getLogger(__name__)
+
+
+def build_wheel_pep517(
+    name: str,
+    backend: Pep517HookCaller,
+    metadata_directory: str,
+    tempd: str,
+) -> Optional[str]:
+    """Build one InstallRequirement using the PEP 517 build process.
+
+    Returns path to wheel if successfully built. Otherwise, returns None.
+    """
+    assert metadata_directory is not None
+    try:
+        logger.debug("Destination directory: %s", tempd)
+
+        runner = runner_with_spinner_message(
+            f"Building wheel for {name} (pyproject.toml)"
+        )
+        with backend.subprocess_runner(runner):
+            wheel_name = backend.build_wheel(
+                tempd,
+                metadata_directory=metadata_directory,
+            )
+    except Exception:
+        logger.error("Failed building wheel for %s", name)
+        return None
+    return os.path.join(tempd, wheel_name)
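
A sketch of invoking the PEP 517 wheel build directly; the paths are placeholders and the metadata directory is assumed to come from generate_metadata() earlier in this diff:

```python
from pip._vendor.pep517.wrappers import Pep517HookCaller

from pip._internal.operations.build.wheel import build_wheel_pep517

backend = Pep517HookCaller("/path/to/project", "setuptools.build_meta")
wheel_path = build_wheel_pep517(
    name="example-project",
    backend=backend,
    metadata_directory="/path/to/example_project.dist-info",  # placeholder
    tempd="/tmp/wheel-build",  # placeholder destination directory
)
print(wheel_path)  # None on failure, otherwise the built wheel's path
```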
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/operations/build/wheel_editable.py b/venv/lib/python3.9/site-packages/pip/_internal/operations/build/wheel_editable.py
new file mode 100644
index 0000000..cf7b01a
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/operations/build/wheel_editable.py
@@ -0,0 +1,46 @@
+import logging
+import os
+from typing import Optional
+
+from pip._vendor.pep517.wrappers import HookMissing, Pep517HookCaller
+
+from pip._internal.utils.subprocess import runner_with_spinner_message
+
+logger = logging.getLogger(__name__)
+
+
+def build_wheel_editable(
+    name: str,
+    backend: Pep517HookCaller,
+    metadata_directory: str,
+    tempd: str,
+) -> Optional[str]:
+    """Build one InstallRequirement using the PEP 660 build process.
+
+    Returns path to wheel if successfully built. Otherwise, returns None.
+    """
+    assert metadata_directory is not None
+    try:
+        logger.debug("Destination directory: %s", tempd)
+
+        runner = runner_with_spinner_message(
+            f"Building editable for {name} (pyproject.toml)"
+        )
+        with backend.subprocess_runner(runner):
+            try:
+                wheel_name = backend.build_editable(
+                    tempd,
+                    metadata_directory=metadata_directory,
+                )
+            except HookMissing as e:
+                logger.error(
+                    "Cannot build editable %s because the build "
+                    "backend does not have the %s hook",
+                    name,
+                    e,
+                )
+                return None
+    except Exception:
+        logger.error("Failed building editable for %s", name)
+        return None
+    return os.path.join(tempd, wheel_name)
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/operations/build/wheel_legacy.py b/venv/lib/python3.9/site-packages/pip/_internal/operations/build/wheel_legacy.py
new file mode 100644
index 0000000..c5f0492
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/operations/build/wheel_legacy.py
@@ -0,0 +1,102 @@
+import logging
+import os.path
+from typing import List, Optional
+
+from pip._internal.cli.spinners import open_spinner
+from pip._internal.utils.setuptools_build import make_setuptools_bdist_wheel_args
+from pip._internal.utils.subprocess import call_subprocess, format_command_args
+
+logger = logging.getLogger(__name__)
+
+
+def format_command_result(
+    command_args: List[str],
+    command_output: str,
+) -> str:
+    """Format command information for logging."""
+    command_desc = format_command_args(command_args)
+    text = f"Command arguments: {command_desc}\n"
+
+    if not command_output:
+        text += "Command output: None"
+    elif logger.getEffectiveLevel() > logging.DEBUG:
+        text += "Command output: [use --verbose to show]"
+    else:
+        if not command_output.endswith("\n"):
+            command_output += "\n"
+        text += f"Command output:\n{command_output}"
+
+    return text
+
+
+def get_legacy_build_wheel_path(
+    names: List[str],
+    temp_dir: str,
+    name: str,
+    command_args: List[str],
+    command_output: str,
+) -> Optional[str]:
+    """Return the path to the wheel in the temporary build directory."""
+    # Sort for determinism.
+    names = sorted(names)
+    if not names:
+        msg = ("Legacy build of wheel for {!r} created no files.\n").format(name)
+        msg += format_command_result(command_args, command_output)
+        logger.warning(msg)
+        return None
+
+    if len(names) > 1:
+        msg = (
+            "Legacy build of wheel for {!r} created more than one file.\n"
+            "Filenames (choosing first): {}\n"
+        ).format(name, names)
+        msg += format_command_result(command_args, command_output)
+        logger.warning(msg)
+
+    return os.path.join(temp_dir, names[0])
+
+
+def build_wheel_legacy(
+    name: str,
+    setup_py_path: str,
+    source_dir: str,
+    global_options: List[str],
+    build_options: List[str],
+    tempd: str,
+) -> Optional[str]:
+    """Build one unpacked package using the "legacy" build process.
+
+    Returns path to wheel if successfully built. Otherwise, returns None.
+    """
+    wheel_args = make_setuptools_bdist_wheel_args(
+        setup_py_path,
+        global_options=global_options,
+        build_options=build_options,
+        destination_dir=tempd,
+    )
+
+    spin_message = f"Building wheel for {name} (setup.py)"
+    with open_spinner(spin_message) as spinner:
+        logger.debug("Destination directory: %s", tempd)
+
+        try:
+            output = call_subprocess(
+                wheel_args,
+                command_desc="python setup.py bdist_wheel",
+                cwd=source_dir,
+                spinner=spinner,
+            )
+        except Exception:
+            spinner.finish("error")
+            logger.error("Failed building wheel for %s", name)
+            return None
+
+        names = os.listdir(tempd)
+        wheel_path = get_legacy_build_wheel_path(
+            names=names,
+            temp_dir=tempd,
+            name=name,
+            command_args=wheel_args,
+            command_output=output,
+        )
+        return wheel_path
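
format_command_result is self-contained, which makes the --verbose gating easy to see; a quick sketch:

```python
import logging

from pip._internal.operations.build.wheel_legacy import format_command_result

logging.basicConfig(level=logging.INFO)  # above DEBUG, so output is elided
print(
    format_command_result(
        ["python", "setup.py", "bdist_wheel", "-d", "/tmp/build"],
        command_output="running bdist_wheel\n",
    )
)
# Command arguments: python setup.py bdist_wheel -d /tmp/build
# Command output: [use --verbose to show]
```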
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/operations/check.py b/venv/lib/python3.9/site-packages/pip/_internal/operations/check.py
new file mode 100644
index 0000000..fb3ac8b
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/operations/check.py
@@ -0,0 +1,149 @@
+"""Validation of dependencies of packages
+"""
+
+import logging
+from typing import Callable, Dict, List, NamedTuple, Optional, Set, Tuple
+
+from pip._vendor.packaging.requirements import Requirement
+from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
+
+from pip._internal.distributions import make_distribution_for_install_requirement
+from pip._internal.metadata import get_default_environment
+from pip._internal.metadata.base import DistributionVersion
+from pip._internal.req.req_install import InstallRequirement
+
+logger = logging.getLogger(__name__)
+
+
+class PackageDetails(NamedTuple):
+    version: DistributionVersion
+    dependencies: List[Requirement]
+
+
+# Shorthands
+PackageSet = Dict[NormalizedName, PackageDetails]
+Missing = Tuple[NormalizedName, Requirement]
+Conflicting = Tuple[NormalizedName, DistributionVersion, Requirement]
+
+MissingDict = Dict[NormalizedName, List[Missing]]
+ConflictingDict = Dict[NormalizedName, List[Conflicting]]
+CheckResult = Tuple[MissingDict, ConflictingDict]
+ConflictDetails = Tuple[PackageSet, CheckResult]
+
+
+def create_package_set_from_installed() -> Tuple[PackageSet, bool]:
+    """Build a PackageSet from the currently installed distributions."""
+    package_set = {}
+    problems = False
+    env = get_default_environment()
+    for dist in env.iter_installed_distributions(local_only=False, skip=()):
+        name = dist.canonical_name
+        try:
+            dependencies = list(dist.iter_dependencies())
+            package_set[name] = PackageDetails(dist.version, dependencies)
+        except (OSError, ValueError) as e:
+            # Don't crash on unreadable or broken metadata.
+            logger.warning("Error parsing requirements for %s: %s", name, e)
+            problems = True
+    return package_set, problems
+
+
+def check_package_set(
+    package_set: PackageSet, should_ignore: Optional[Callable[[str], bool]] = None
+) -> CheckResult:
+    """Check if a package set is consistent
+
+    If should_ignore is passed, it should be a callable that takes a
+    package name and returns a boolean.
+    """
+
+    missing = {}
+    conflicting = {}
+
+    for package_name, package_detail in package_set.items():
+        # Info about dependencies of package_name
+        missing_deps: Set[Missing] = set()
+        conflicting_deps: Set[Conflicting] = set()
+
+        if should_ignore and should_ignore(package_name):
+            continue
+
+        for req in package_detail.dependencies:
+            name = canonicalize_name(req.name)
+
+            # Check if it's missing
+            if name not in package_set:
+                missed = True
+                if req.marker is not None:
+                    missed = req.marker.evaluate()
+                if missed:
+                    missing_deps.add((name, req))
+                continue
+
+            # Check if there's a conflict
+            version = package_set[name].version
+            if not req.specifier.contains(version, prereleases=True):
+                conflicting_deps.add((name, version, req))
+
+        if missing_deps:
+            missing[package_name] = sorted(missing_deps, key=str)
+        if conflicting_deps:
+            conflicting[package_name] = sorted(conflicting_deps, key=str)
+
+    return missing, conflicting
+
+
+def check_install_conflicts(to_install: List[InstallRequirement]) -> ConflictDetails:
+    """Check if the dependency graph would be consistent after
+    installing the given requirements.
+    """
+    # Start from the current state
+    package_set, _ = create_package_set_from_installed()
+    # Install packages
+    would_be_installed = _simulate_installation_of(to_install, package_set)
+
+    # Only warn about directly-dependent packages; create a whitelist of them
+    whitelist = _create_whitelist(would_be_installed, package_set)
+
+    return (
+        package_set,
+        check_package_set(
+            package_set, should_ignore=lambda name: name not in whitelist
+        ),
+    )
+
+
+def _simulate_installation_of(
+    to_install: List[InstallRequirement], package_set: PackageSet
+) -> Set[NormalizedName]:
+    """Simulate installing to_install into package_set, returning the
+    set of canonical names that would be installed."""
+    # Keep track of packages that were installed
+    installed = set()
+
+    # Modify it as installing requirement_set would (assuming no errors)
+    for inst_req in to_install:
+        abstract_dist = make_distribution_for_install_requirement(inst_req)
+        dist = abstract_dist.get_metadata_distribution()
+        name = dist.canonical_name
+        package_set[name] = PackageDetails(dist.version, list(dist.iter_dependencies()))
+
+        installed.add(name)
+
+    return installed
+
+
+def _create_whitelist(
+    would_be_installed: Set[NormalizedName], package_set: PackageSet
+) -> Set[NormalizedName]:
+    packages_affected = set(would_be_installed)
+
+    for package_name in package_set:
+        if package_name in packages_affected:
+            continue
+
+        for req in package_set[package_name].dependencies:
+            if canonicalize_name(req.name) in packages_affected:
+                packages_affected.add(package_name)
+                break
+
+    return packages_affected
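
A synthetic PackageSet makes the conflict check concrete; the names and versions below are invented for the example:

```python
from pip._vendor.packaging.requirements import Requirement
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.packaging.version import Version

from pip._internal.operations.check import PackageDetails, check_package_set

package_set = {
    canonicalize_name("requests"): PackageDetails(
        Version("2.28.0"), [Requirement("urllib3>=1.21.1,<1.27")]
    ),
    canonicalize_name("urllib3"): PackageDetails(Version("2.0.0"), []),
}
missing, conflicting = check_package_set(package_set)
# urllib3 2.0.0 falls outside requests' specifier, so "requests" shows up
# in `conflicting`; `missing` is empty here.
print(missing, conflicting)
```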
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/operations/freeze.py b/venv/lib/python3.9/site-packages/pip/_internal/operations/freeze.py
new file mode 100644
index 0000000..930d4c6
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/operations/freeze.py
@@ -0,0 +1,254 @@
+import collections
+import logging
+import os
+from typing import Container, Dict, Generator, Iterable, List, NamedTuple, Optional, Set
+
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.packaging.version import Version
+
+from pip._internal.exceptions import BadCommand, InstallationError
+from pip._internal.metadata import BaseDistribution, get_environment
+from pip._internal.req.constructors import (
+    install_req_from_editable,
+    install_req_from_line,
+)
+from pip._internal.req.req_file import COMMENT_RE
+from pip._internal.utils.direct_url_helpers import direct_url_as_pep440_direct_reference
+
+logger = logging.getLogger(__name__)
+
+
+class _EditableInfo(NamedTuple):
+    requirement: str
+    comments: List[str]
+
+
+def freeze(
+    requirement: Optional[List[str]] = None,
+    local_only: bool = False,
+    user_only: bool = False,
+    paths: Optional[List[str]] = None,
+    isolated: bool = False,
+    exclude_editable: bool = False,
+    skip: Container[str] = (),
+) -> Generator[str, None, None]:
+    installations: Dict[str, FrozenRequirement] = {}
+
+    dists = get_environment(paths).iter_installed_distributions(
+        local_only=local_only,
+        skip=(),
+        user_only=user_only,
+    )
+    for dist in dists:
+        req = FrozenRequirement.from_dist(dist)
+        if exclude_editable and req.editable:
+            continue
+        installations[req.canonical_name] = req
+
+    if requirement:
+        # the options that don't get turned into an InstallRequirement
+        # should only be emitted once, even if the same option is in multiple
+        # requirements files, so we need to keep track of what has been emitted
+        # so that we don't emit it again if it's seen again
+        emitted_options: Set[str] = set()
+        # keep track of which files a requirement is in so that we can
+        # give an accurate warning if a requirement appears multiple times.
+        req_files: Dict[str, List[str]] = collections.defaultdict(list)
+        for req_file_path in requirement:
+            with open(req_file_path) as req_file:
+                for line in req_file:
+                    if (
+                        not line.strip()
+                        or line.strip().startswith("#")
+                        or line.startswith(
+                            (
+                                "-r",
+                                "--requirement",
+                                "-f",
+                                "--find-links",
+                                "-i",
+                                "--index-url",
+                                "--pre",
+                                "--trusted-host",
+                                "--process-dependency-links",
+                                "--extra-index-url",
+                                "--use-feature",
+                            )
+                        )
+                    ):
+                        line = line.rstrip()
+                        if line not in emitted_options:
+                            emitted_options.add(line)
+                            yield line
+                        continue
+
+                    if line.startswith("-e") or line.startswith("--editable"):
+                        if line.startswith("-e"):
+                            line = line[2:].strip()
+                        else:
+                            line = line[len("--editable") :].strip().lstrip("=")
+                        line_req = install_req_from_editable(
+                            line,
+                            isolated=isolated,
+                        )
+                    else:
+                        line_req = install_req_from_line(
+                            COMMENT_RE.sub("", line).strip(),
+                            isolated=isolated,
+                        )
+
+                    if not line_req.name:
+                        logger.info(
+                            "Skipping line in requirement file [%s] because "
+                            "it's not clear what it would install: %s",
+                            req_file_path,
+                            line.strip(),
+                        )
+                        logger.info(
+                            "  (add #egg=PackageName to the URL to avoid"
+                            " this warning)"
+                        )
+                    else:
+                        line_req_canonical_name = canonicalize_name(line_req.name)
+                        if line_req_canonical_name not in installations:
+                            # either it's not installed, or it is installed
+                            # but has been processed already
+                            if not req_files[line_req.name]:
+                                logger.warning(
+                                    "Requirement file [%s] contains %s, but "
+                                    "package %r is not installed",
+                                    req_file_path,
+                                    COMMENT_RE.sub("", line).strip(),
+                                    line_req.name,
+                                )
+                            else:
+                                req_files[line_req.name].append(req_file_path)
+                        else:
+                            yield str(installations[line_req_canonical_name]).rstrip()
+                            del installations[line_req_canonical_name]
+                            req_files[line_req.name].append(req_file_path)
+
+        # Warn about requirements that were included multiple times (in a
+        # single requirements file or in different requirements files).
+        for name, files in req_files.items():
+            if len(files) > 1:
+                logger.warning(
+                    "Requirement %s included multiple times [%s]",
+                    name,
+                    ", ".join(sorted(set(files))),
+                )
+
+        yield ("## The following requirements were added by pip freeze:")
+    for installation in sorted(installations.values(), key=lambda x: x.name.lower()):
+        if installation.canonical_name not in skip:
+            yield str(installation).rstrip()
+
+
+def _format_as_name_version(dist: BaseDistribution) -> str:
+    if isinstance(dist.version, Version):
+        return f"{dist.raw_name}=={dist.version}"
+    return f"{dist.raw_name}==={dist.version}"
+
+
+def _get_editable_info(dist: BaseDistribution) -> _EditableInfo:
+    """
+    Compute and return values (req, comments) for use in
+    FrozenRequirement.from_dist().
+    """
+    editable_project_location = dist.editable_project_location
+    assert editable_project_location
+    location = os.path.normcase(os.path.abspath(editable_project_location))
+
+    from pip._internal.vcs import RemoteNotFoundError, RemoteNotValidError, vcs
+
+    vcs_backend = vcs.get_backend_for_dir(location)
+
+    if vcs_backend is None:
+        display = _format_as_name_version(dist)
+        logger.debug(
+            'No VCS found for editable requirement "%s" in: %r',
+            display,
+            location,
+        )
+        return _EditableInfo(
+            requirement=location,
+            comments=[f"# Editable install with no version control ({display})"],
+        )
+
+    vcs_name = type(vcs_backend).__name__
+
+    try:
+        req = vcs_backend.get_src_requirement(location, dist.raw_name)
+    except RemoteNotFoundError:
+        display = _format_as_name_version(dist)
+        return _EditableInfo(
+            requirement=location,
+            comments=[f"# Editable {vcs_name} install with no remote ({display})"],
+        )
+    except RemoteNotValidError as ex:
+        display = _format_as_name_version(dist)
+        return _EditableInfo(
+            requirement=location,
+            comments=[
+                f"# Editable {vcs_name} install ({display}) with either a deleted "
+                f"local remote or invalid URI:",
+                f"# '{ex.url}'",
+            ],
+        )
+    except BadCommand:
+        logger.warning(
+            "cannot determine version of editable source in %s "
+            "(%s command not found in path)",
+            location,
+            vcs_backend.name,
+        )
+        return _EditableInfo(requirement=location, comments=[])
+    except InstallationError as exc:
+        logger.warning("Error when trying to get requirement for VCS system: %s", exc)
+    else:
+        return _EditableInfo(requirement=req, comments=[])
+
+    logger.warning("Could not determine repository location of %s", location)
+
+    return _EditableInfo(
+        requirement=location,
+        comments=["## !! Could not determine repository location"],
+    )
+
+
+class FrozenRequirement:
+    def __init__(
+        self,
+        name: str,
+        req: str,
+        editable: bool,
+        comments: Iterable[str] = (),
+    ) -> None:
+        self.name = name
+        self.canonical_name = canonicalize_name(name)
+        self.req = req
+        self.editable = editable
+        self.comments = comments
+
+    @classmethod
+    def from_dist(cls, dist: BaseDistribution) -> "FrozenRequirement":
+        editable = dist.editable
+        if editable:
+            req, comments = _get_editable_info(dist)
+        else:
+            comments = []
+            direct_url = dist.direct_url
+            if direct_url:
+                # if PEP 610 metadata is present, use it
+                req = direct_url_as_pep440_direct_reference(direct_url, dist.raw_name)
+            else:
+                # name==version requirement
+                req = _format_as_name_version(dist)
+
+        return cls(dist.raw_name, req, editable, comments=comments)
+
+    def __str__(self) -> str:
+        req = self.req
+        if self.editable:
+            req = f"-e {req}"
+        return "\n".join(list(self.comments) + [str(req)]) + "\n"
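
End to end, the generator yields requirements.txt lines for the current environment; a minimal sketch:

```python
from pip._internal.operations.freeze import freeze

# Canonical names in `skip` are omitted from the output.
for line in freeze(skip={"pip", "setuptools", "wheel"}):
    print(line)
```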
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/operations/install/__init__.py b/venv/lib/python3.9/site-packages/pip/_internal/operations/install/__init__.py
new file mode 100644
index 0000000..24d6a5d
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/operations/install/__init__.py
@@ -0,0 +1,2 @@
+"""For modules related to installing packages.
+"""
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..df334f6
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-39.pyc
new file mode 100644
index 0000000..3237cde
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/operations/install/__pycache__/legacy.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/operations/install/__pycache__/legacy.cpython-39.pyc
new file mode 100644
index 0000000..22ce590
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/operations/install/__pycache__/legacy.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-39.pyc
new file mode 100644
index 0000000..5249bf5
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/operations/install/editable_legacy.py b/venv/lib/python3.9/site-packages/pip/_internal/operations/install/editable_legacy.py
new file mode 100644
index 0000000..bb548cd
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/operations/install/editable_legacy.py
@@ -0,0 +1,47 @@
+"""Legacy editable installation process, i.e. `setup.py develop`.
+"""
+import logging
+from typing import List, Optional, Sequence
+
+from pip._internal.build_env import BuildEnvironment
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.setuptools_build import make_setuptools_develop_args
+from pip._internal.utils.subprocess import call_subprocess
+
+logger = logging.getLogger(__name__)
+
+
+def install_editable(
+    install_options: List[str],
+    global_options: Sequence[str],
+    prefix: Optional[str],
+    home: Optional[str],
+    use_user_site: bool,
+    name: str,
+    setup_py_path: str,
+    isolated: bool,
+    build_env: BuildEnvironment,
+    unpacked_source_directory: str,
+) -> None:
+    """Install a package in editable mode. Most arguments are passed
+    through to setuptools.
+    """
+    logger.info("Running setup.py develop for %s", name)
+
+    args = make_setuptools_develop_args(
+        setup_py_path,
+        global_options=global_options,
+        install_options=install_options,
+        no_user_config=isolated,
+        prefix=prefix,
+        home=home,
+        use_user_site=use_user_site,
+    )
+
+    with indent_log():
+        with build_env:
+            call_subprocess(
+                args,
+                command_desc="python setup.py develop",
+                cwd=unpacked_source_directory,
+            )
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/operations/install/legacy.py b/venv/lib/python3.9/site-packages/pip/_internal/operations/install/legacy.py
new file mode 100644
index 0000000..290967d
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/operations/install/legacy.py
@@ -0,0 +1,120 @@
+"""Legacy installation process, i.e. `setup.py install`.
+"""
+
+import logging
+import os
+from typing import List, Optional, Sequence
+
+from pip._internal.build_env import BuildEnvironment
+from pip._internal.exceptions import InstallationError, LegacyInstallFailure
+from pip._internal.locations.base import change_root
+from pip._internal.models.scheme import Scheme
+from pip._internal.utils.misc import ensure_dir
+from pip._internal.utils.setuptools_build import make_setuptools_install_args
+from pip._internal.utils.subprocess import runner_with_spinner_message
+from pip._internal.utils.temp_dir import TempDirectory
+
+logger = logging.getLogger(__name__)
+
+
+def write_installed_files_from_setuptools_record(
+    record_lines: List[str],
+    root: Optional[str],
+    req_description: str,
+) -> None:
+    def prepend_root(path: str) -> str:
+        if root is None or not os.path.isabs(path):
+            return path
+        else:
+            return change_root(root, path)
+
+    for line in record_lines:
+        directory = os.path.dirname(line)
+        if directory.endswith(".egg-info"):
+            egg_info_dir = prepend_root(directory)
+            break
+    else:
+        message = (
+            "{} did not indicate that it installed an "
+            ".egg-info directory. Only setup.py projects "
+            "generating .egg-info directories are supported."
+        ).format(req_description)
+        raise InstallationError(message)
+
+    new_lines = []
+    for line in record_lines:
+        filename = line.strip()
+        if os.path.isdir(filename):
+            filename += os.path.sep
+        new_lines.append(os.path.relpath(prepend_root(filename), egg_info_dir))
+    new_lines.sort()
+    ensure_dir(egg_info_dir)
+    inst_files_path = os.path.join(egg_info_dir, "installed-files.txt")
+    with open(inst_files_path, "w") as f:
+        f.write("\n".join(new_lines) + "\n")
+
+
+def install(
+    install_options: List[str],
+    global_options: Sequence[str],
+    root: Optional[str],
+    home: Optional[str],
+    prefix: Optional[str],
+    use_user_site: bool,
+    pycompile: bool,
+    scheme: Scheme,
+    setup_py_path: str,
+    isolated: bool,
+    req_name: str,
+    build_env: BuildEnvironment,
+    unpacked_source_directory: str,
+    req_description: str,
+) -> bool:
+
+    header_dir = scheme.headers
+
+    with TempDirectory(kind="record") as temp_dir:
+        try:
+            record_filename = os.path.join(temp_dir.path, "install-record.txt")
+            install_args = make_setuptools_install_args(
+                setup_py_path,
+                global_options=global_options,
+                install_options=install_options,
+                record_filename=record_filename,
+                root=root,
+                prefix=prefix,
+                header_dir=header_dir,
+                home=home,
+                use_user_site=use_user_site,
+                no_user_config=isolated,
+                pycompile=pycompile,
+            )
+
+            runner = runner_with_spinner_message(
+                f"Running setup.py install for {req_name}"
+            )
+            with build_env:
+                runner(
+                    cmd=install_args,
+                    cwd=unpacked_source_directory,
+                )
+
+            if not os.path.exists(record_filename):
+                logger.debug("Record file %s not found", record_filename)
+                # Signal to the caller that we didn't install the new package
+                return False
+
+        except Exception as e:
+            # Signal to the caller that we didn't install the new package
+            raise LegacyInstallFailure(package_details=req_name) from e
+
+        # At this point, we have successfully installed the requirement.
+
+        # We intentionally do not use any encoding to read the file because
+        # setuptools writes the file using distutils.file_util.write_file,
+        # which does not specify an encoding.
+        with open(record_filename) as f:
+            record_lines = f.read().splitlines()
+
+    write_installed_files_from_setuptools_record(record_lines, root, req_description)
+    return True
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/operations/install/wheel.py b/venv/lib/python3.9/site-packages/pip/_internal/operations/install/wheel.py
new file mode 100644
index 0000000..c799413
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/operations/install/wheel.py
@@ -0,0 +1,738 @@
+"""Support for installing and building the "wheel" binary package format.
+"""
+
+import collections
+import compileall
+import contextlib
+import csv
+import importlib
+import logging
+import os.path
+import re
+import shutil
+import sys
+import warnings
+from base64 import urlsafe_b64encode
+from email.message import Message
+from itertools import chain, filterfalse, starmap
+from typing import (
+    IO,
+    TYPE_CHECKING,
+    Any,
+    BinaryIO,
+    Callable,
+    Dict,
+    Generator,
+    Iterable,
+    Iterator,
+    List,
+    NewType,
+    Optional,
+    Sequence,
+    Set,
+    Tuple,
+    Union,
+    cast,
+)
+from zipfile import ZipFile, ZipInfo
+
+from pip._vendor.distlib.scripts import ScriptMaker
+from pip._vendor.distlib.util import get_export_entry
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.exceptions import InstallationError
+from pip._internal.locations import get_major_minor_version
+from pip._internal.metadata import (
+    BaseDistribution,
+    FilesystemWheel,
+    get_wheel_distribution,
+)
+from pip._internal.models.direct_url import DIRECT_URL_METADATA_NAME, DirectUrl
+from pip._internal.models.scheme import SCHEME_KEYS, Scheme
+from pip._internal.utils.filesystem import adjacent_tmp_file, replace
+from pip._internal.utils.misc import captured_stdout, ensure_dir, hash_file, partition
+from pip._internal.utils.unpacking import (
+    current_umask,
+    is_within_directory,
+    set_extracted_file_to_default_mode_plus_executable,
+    zip_item_is_executable,
+)
+from pip._internal.utils.wheel import parse_wheel
+
+if TYPE_CHECKING:
+    from typing import Protocol
+
+    class File(Protocol):
+        src_record_path: "RecordPath"
+        dest_path: str
+        changed: bool
+
+        def save(self) -> None:
+            pass
+
+
+logger = logging.getLogger(__name__)
+
+RecordPath = NewType("RecordPath", str)
+InstalledCSVRow = Tuple[RecordPath, str, Union[int, str]]
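+# RecordPath is a "/"-separated path exactly as written in a wheel's RECORD
+# file; InstalledCSVRow is one (path, hash, size) row of that file (see
+# PEP 376 / PEP 427).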
+
+
+def rehash(path: str, blocksize: int = 1 << 20) -> Tuple[str, str]:
+    """Return (encoded_digest, length) for path using hashlib.sha256()"""
+    h, length = hash_file(path, blocksize)
+    digest = "sha256=" + urlsafe_b64encode(h.digest()).decode("latin1").rstrip("=")
+    return (digest, str(length))
+
+
+def csv_io_kwargs(mode: str) -> Dict[str, Any]:
+    """Return keyword arguments to properly open a CSV file
+    in the given mode.
+    """
+    return {"mode": mode, "newline": "", "encoding": "utf-8"}
+
+
+def fix_script(path: str) -> bool:
+    """Replace #!python with #!/path/to/python
+    Return True if file was changed.
+    """
+    # XXX RECORD hashes will need to be updated
+    assert os.path.isfile(path)
+
+    with open(path, "rb") as script:
+        firstline = script.readline()
+        if not firstline.startswith(b"#!python"):
+            return False
+        exename = sys.executable.encode(sys.getfilesystemencoding())
+        firstline = b"#!" + exename + os.linesep.encode("ascii")
+        rest = script.read()
+    with open(path, "wb") as script:
+        script.write(firstline)
+        script.write(rest)
+    return True
+
+
+def wheel_root_is_purelib(metadata: Message) -> bool:
+    return metadata.get("Root-Is-Purelib", "").lower() == "true"
+
+
+def get_entrypoints(dist: BaseDistribution) -> Tuple[Dict[str, str], Dict[str, str]]:
+    console_scripts = {}
+    gui_scripts = {}
+    for entry_point in dist.iter_entry_points():
+        if entry_point.group == "console_scripts":
+            console_scripts[entry_point.name] = entry_point.value
+        elif entry_point.group == "gui_scripts":
+            gui_scripts[entry_point.name] = entry_point.value
+    return console_scripts, gui_scripts
+
+
+def message_about_scripts_not_on_PATH(scripts: Sequence[str]) -> Optional[str]:
+    """Determine if any scripts are not on PATH and format a warning.
+    Returns a warning message if one or more scripts are not on PATH,
+    otherwise None.
+    """
+    if not scripts:
+        return None
+
+    # Group scripts by the path they were installed in
+    grouped_by_dir: Dict[str, Set[str]] = collections.defaultdict(set)
+    for destfile in scripts:
+        parent_dir = os.path.dirname(destfile)
+        script_name = os.path.basename(destfile)
+        grouped_by_dir[parent_dir].add(script_name)
+
+    # We don't want to warn for directories that are on PATH.
+    not_warn_dirs = [
+        os.path.normcase(i).rstrip(os.sep)
+        for i in os.environ.get("PATH", "").split(os.pathsep)
+    ]
+    # If an executable sits alongside sys.executable, we don't warn for it.
+    #     This covers the case of venv invocations without activating the venv.
+    not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable)))
+    warn_for: Dict[str, Set[str]] = {
+        parent_dir: scripts
+        for parent_dir, scripts in grouped_by_dir.items()
+        if os.path.normcase(parent_dir) not in not_warn_dirs
+    }
+    if not warn_for:
+        return None
+
+    # Format a message
+    msg_lines = []
+    for parent_dir, dir_scripts in warn_for.items():
+        sorted_scripts: List[str] = sorted(dir_scripts)
+        if len(sorted_scripts) == 1:
+            start_text = "script {} is".format(sorted_scripts[0])
+        else:
+            start_text = "scripts {} are".format(
+                ", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1]
+            )
+
+        msg_lines.append(
+            "The {} installed in '{}' which is not on PATH.".format(
+                start_text, parent_dir
+            )
+        )
+
+    last_line_fmt = (
+        "Consider adding {} to PATH or, if you prefer "
+        "to suppress this warning, use --no-warn-script-location."
+    )
+    if len(msg_lines) == 1:
+        msg_lines.append(last_line_fmt.format("this directory"))
+    else:
+        msg_lines.append(last_line_fmt.format("these directories"))
+
+    # Add a note if any directory starts with ~
+    warn_for_tilde = any(
+        i[0] == "~" for i in os.environ.get("PATH", "").split(os.pathsep) if i
+    )
+    if warn_for_tilde:
+        tilde_warning_msg = (
+            "NOTE: The current PATH contains path(s) starting with `~`, "
+            "which may not be expanded by all applications."
+        )
+        msg_lines.append(tilde_warning_msg)
+
+    # Returns the formatted multiline message
+    return "\n".join(msg_lines)
+
+
+def _normalized_outrows(
+    outrows: Iterable[InstalledCSVRow],
+) -> List[Tuple[str, str, str]]:
+    """Normalize the given rows of a RECORD file.
+
+    Items in each row are converted into str. Rows are then sorted to make
+    the value more predictable for tests.
+
+    Each row is a 3-tuple (path, hash, size) and corresponds to a record of
+    a RECORD file (see PEP 376 and PEP 427 for details).  For the rows
+    passed to this function, the size can be an integer (as an int or a str)
+    or the empty string.
+    """
+    # Normally, there should only be one row per path, in which case the
+    # second and third elements don't come into play when sorting.
+    # However, in cases in the wild where a path might happen to occur twice,
+    # we don't want the sort operation to trigger an error (but still want
+    # determinism).  Since the third element can be an int or string, we
+    # coerce each element to a string to avoid a TypeError in this case.
+    # For additional background, see--
+    # https://github.com/pypa/pip/issues/5868
+    return sorted(
+        (record_path, hash_, str(size)) for record_path, hash_, size in outrows
+    )
+
+
+def _record_to_fs_path(record_path: RecordPath, lib_dir: str) -> str:
+    return os.path.join(lib_dir, record_path)
+
+
+def _fs_to_record_path(path: str, lib_dir: str) -> RecordPath:
+    # On Windows, do not handle relative paths if they belong to different
+    # logical disks
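+    # e.g. a file on D:\ with lib_dir on C:\ keeps its absolute path, since
+    # os.path.relpath cannot cross drives.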
+    if os.path.splitdrive(path)[0].lower() == os.path.splitdrive(lib_dir)[0].lower():
+        path = os.path.relpath(path, lib_dir)
+
+    path = path.replace(os.path.sep, "/")
+    return cast("RecordPath", path)
+
+
+def get_csv_rows_for_installed(
+    old_csv_rows: List[List[str]],
+    installed: Dict[RecordPath, RecordPath],
+    changed: Set[RecordPath],
+    generated: List[str],
+    lib_dir: str,
+) -> List[InstalledCSVRow]:
+    """
+    :param installed: A map from archive RECORD path to installation RECORD
+        path.
+    """
+    installed_rows: List[InstalledCSVRow] = []
+    for row in old_csv_rows:
+        if len(row) > 3:
+            logger.warning("RECORD line has more than three elements: %s", row)
+        old_record_path = cast("RecordPath", row[0])
+        new_record_path = installed.pop(old_record_path, old_record_path)
+        if new_record_path in changed:
+            digest, length = rehash(_record_to_fs_path(new_record_path, lib_dir))
+        else:
+            digest = row[1] if len(row) > 1 else ""
+            length = row[2] if len(row) > 2 else ""
+        installed_rows.append((new_record_path, digest, length))
+    for f in generated:
+        path = _fs_to_record_path(f, lib_dir)
+        digest, length = rehash(f)
+        installed_rows.append((path, digest, length))
+    for installed_record_path in installed.values():
+        installed_rows.append((installed_record_path, "", ""))
+    return installed_rows
+
+
+def get_console_script_specs(console: Dict[str, str]) -> List[str]:
+    """
+    Given the mapping from entrypoint name to callable, return the relevant
+    console script specs.
+    """
+    # Don't mutate caller's version
+    console = console.copy()
+
+    scripts_to_generate = []
+
+    # Special case pip and setuptools to generate versioned wrappers
+    #
+    # The issue is that some projects (specifically, pip and setuptools) use
+    # code in setup.py to create "versioned" entry points - pip2.7 on Python
+    # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
+    # the wheel metadata at build time, and so if the wheel is installed with
+    # a *different* version of Python the entry points will be wrong. The
+    # correct fix for this is to enhance the metadata to be able to describe
+    # such versioned entry points, but that won't happen till Metadata 2.0 is
+    # available.
+    # In the meantime, projects using versioned entry points will either have
+    # incorrect versioned entry points, or they will not be able to distribute
+    # "universal" wheels (i.e., they will need a wheel per Python version).
+    #
+    # Because setuptools and pip are bundled with _ensurepip and virtualenv,
+    # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
+    # override the versioned entry points in the wheel and generate the
+    # correct ones. This code is purely a short-term measure until Metadata 2.0
+    # is available.
+    #
+    # To add to the level of hack in this section of code: in order to support
+    # ensurepip, this code looks for an ``ENSUREPIP_OPTIONS`` environment
+    # variable, which controls which versioned scripts get installed.
+    #
+    # ENSUREPIP_OPTIONS=altinstall
+    #   - Only pipX.Y and easy_install-X.Y will be generated and installed
+    # ENSUREPIP_OPTIONS=install
+    #   - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
+    #     that this branch applies whenever ENSUREPIP_OPTIONS is set to any
+    #     value other than altinstall
+    # DEFAULT
+    #   - The default behavior is to install pip, pipX, pipX.Y, easy_install
+    #     and easy_install-X.Y.
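+    # Illustrative: under CPython 3.9 with ENSUREPIP_OPTIONS unset, a wheel
+    # exposing "pip = pip._internal.cli.main:main" produces the specs
+    # "pip = ...", "pip3 = ..." and "pip3.9 = ...".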
+    pip_script = console.pop("pip", None)
+    if pip_script:
+        if "ENSUREPIP_OPTIONS" not in os.environ:
+            scripts_to_generate.append("pip = " + pip_script)
+
+        if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
+            scripts_to_generate.append(
+                "pip{} = {}".format(sys.version_info[0], pip_script)
+            )
+
+        scripts_to_generate.append(f"pip{get_major_minor_version()} = {pip_script}")
+        # Delete any other versioned pip entry points
+        pip_ep = [k for k in console if re.match(r"pip(\d+(\.\d+)?)?$", k)]
+        for k in pip_ep:
+            del console[k]
+    easy_install_script = console.pop("easy_install", None)
+    if easy_install_script:
+        if "ENSUREPIP_OPTIONS" not in os.environ:
+            scripts_to_generate.append("easy_install = " + easy_install_script)
+
+        scripts_to_generate.append(
+            "easy_install-{} = {}".format(
+                get_major_minor_version(), easy_install_script
+            )
+        )
+        # Delete any other versioned easy_install entry points
+        easy_install_ep = [
+            k for k in console if re.match(r"easy_install(-\d+\.\d+)?$", k)
+        ]
+        for k in easy_install_ep:
+            del console[k]
+
+    # Generate the console entry points specified in the wheel
+    scripts_to_generate.extend(starmap("{} = {}".format, console.items()))
+
+    return scripts_to_generate
+
+
+class ZipBackedFile:
+    def __init__(
+        self, src_record_path: RecordPath, dest_path: str, zip_file: ZipFile
+    ) -> None:
+        self.src_record_path = src_record_path
+        self.dest_path = dest_path
+        self._zip_file = zip_file
+        self.changed = False
+
+    def _getinfo(self) -> ZipInfo:
+        return self._zip_file.getinfo(self.src_record_path)
+
+    def save(self) -> None:
+        # directory creation is lazy and after file filtering
+        # to ensure we don't install empty dirs; empty dirs can't be
+        # uninstalled.
+        parent_dir = os.path.dirname(self.dest_path)
+        ensure_dir(parent_dir)
+
+        # When we open the output file below, any existing file is truncated
+        # before we start writing the new contents. This is fine in most
+        # cases, but can cause a segfault if pip has loaded a shared
+        # object (e.g. from pyopenssl through its vendored urllib3)
+        # Since the shared object is mmap'd an attempt to call a
+        # symbol in it will then cause a segfault. Unlinking the file
+        # allows writing of new contents while allowing the process to
+        # continue to use the old copy.
+        if os.path.exists(self.dest_path):
+            os.unlink(self.dest_path)
+
+        zipinfo = self._getinfo()
+
+        with self._zip_file.open(zipinfo) as f:
+            with open(self.dest_path, "wb") as dest:
+                shutil.copyfileobj(f, dest)
+
+        if zip_item_is_executable(zipinfo):
+            set_extracted_file_to_default_mode_plus_executable(self.dest_path)
+
+
+class ScriptFile:
+    def __init__(self, file: "File") -> None:
+        self._file = file
+        self.src_record_path = self._file.src_record_path
+        self.dest_path = self._file.dest_path
+        self.changed = False
+
+    def save(self) -> None:
+        self._file.save()
+        self.changed = fix_script(self.dest_path)
+
+
+class MissingCallableSuffix(InstallationError):
+    def __init__(self, entry_point: str) -> None:
+        super().__init__(
+            "Invalid script entry point: {} - A callable "
+            "suffix is required. Cf https://packaging.python.org/"
+            "specifications/entry-points/#use-for-scripts for more "
+            "information.".format(entry_point)
+        )
+
+
+def _raise_for_invalid_entrypoint(specification: str) -> None:
+    entry = get_export_entry(specification)
+    if entry is not None and entry.suffix is None:
+        raise MissingCallableSuffix(str(entry))
+
+
+class PipScriptMaker(ScriptMaker):
+    def make(
+        self, specification: str, options: Optional[Dict[str, Any]] = None
+    ) -> List[str]:
+        _raise_for_invalid_entrypoint(specification)
+        return super().make(specification, options)
+
+
+def _install_wheel(
+    name: str,
+    wheel_zip: ZipFile,
+    wheel_path: str,
+    scheme: Scheme,
+    pycompile: bool = True,
+    warn_script_location: bool = True,
+    direct_url: Optional[DirectUrl] = None,
+    requested: bool = False,
+) -> None:
+    """Install a wheel.
+
+    :param name: Name of the project to install
+    :param wheel_zip: open ZipFile for wheel being installed
+    :param scheme: Distutils scheme dictating the install directories
+    :param wheel_path: path of the wheel archive on disk, used in error
+        messages
+    :param pycompile: Whether to byte-compile installed Python files
+    :param warn_script_location: Whether to check that scripts are installed
+        into a directory on PATH
+    :raises UnsupportedWheel:
+        * when the directory holds an unpacked wheel with incompatible
+          Wheel-Version
+        * when the .dist-info dir does not match the wheel
+    """
+    info_dir, metadata = parse_wheel(wheel_zip, name)
+
+    if wheel_root_is_purelib(metadata):
+        lib_dir = scheme.purelib
+    else:
+        lib_dir = scheme.platlib
+
+    # Record details of the files moved
+    #   installed = files copied from the wheel to the destination
+    #   changed = files changed while installing (scripts #! line typically)
+    #   generated = files newly generated during the install (script wrappers)
+    installed: Dict[RecordPath, RecordPath] = {}
+    changed: Set[RecordPath] = set()
+    generated: List[str] = []
+
+    def record_installed(
+        srcfile: RecordPath, destfile: str, modified: bool = False
+    ) -> None:
+        """Map archive RECORD paths to installation RECORD paths."""
+        newpath = _fs_to_record_path(destfile, lib_dir)
+        installed[srcfile] = newpath
+        if modified:
+            changed.add(newpath)
+
+    def is_dir_path(path: RecordPath) -> bool:
+        return path.endswith("/")
+
+    def assert_no_path_traversal(dest_dir_path: str, target_path: str) -> None:
+        if not is_within_directory(dest_dir_path, target_path):
+            message = (
+                "The wheel {!r} has a file {!r} trying to install"
+                " outside the target directory {!r}"
+            )
+            raise InstallationError(
+                message.format(wheel_path, target_path, dest_dir_path)
+            )
+
+    def root_scheme_file_maker(
+        zip_file: ZipFile, dest: str
+    ) -> Callable[[RecordPath], "File"]:
+        def make_root_scheme_file(record_path: RecordPath) -> "File":
+            normed_path = os.path.normpath(record_path)
+            dest_path = os.path.join(dest, normed_path)
+            assert_no_path_traversal(dest, dest_path)
+            return ZipBackedFile(record_path, dest_path, zip_file)
+
+        return make_root_scheme_file
+
+    def data_scheme_file_maker(
+        zip_file: ZipFile, scheme: Scheme
+    ) -> Callable[[RecordPath], "File"]:
+        scheme_paths = {key: getattr(scheme, key) for key in SCHEME_KEYS}
+
+        def make_data_scheme_file(record_path: RecordPath) -> "File":
+            normed_path = os.path.normpath(record_path)
+            try:
+                _, scheme_key, dest_subpath = normed_path.split(os.path.sep, 2)
+            except ValueError:
+                message = (
+                    "Unexpected file in {}: {!r}. .data directory contents"
+                    " should be named like: '<scheme key>/<path>'."
+                ).format(wheel_path, record_path)
+                raise InstallationError(message)
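+            # e.g. "mypkg-1.0.data/scripts/tool" splits into the .data dir,
+            # scheme_key "scripts" and dest_subpath "tool".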
+
+            try:
+                scheme_path = scheme_paths[scheme_key]
+            except KeyError:
+                valid_scheme_keys = ", ".join(sorted(scheme_paths))
+                message = (
+                    "Unknown scheme key used in {}: {} (for file {!r}). .data"
+                    " directory contents should be in subdirectories named"
+                    " with a valid scheme key ({})"
+                ).format(wheel_path, scheme_key, record_path, valid_scheme_keys)
+                raise InstallationError(message)
+
+            dest_path = os.path.join(scheme_path, dest_subpath)
+            assert_no_path_traversal(scheme_path, dest_path)
+            return ZipBackedFile(record_path, dest_path, zip_file)
+
+        return make_data_scheme_file
+
+    def is_data_scheme_path(path: RecordPath) -> bool:
+        return path.split("/", 1)[0].endswith(".data")
+
+    paths = cast(List[RecordPath], wheel_zip.namelist())
+    file_paths = filterfalse(is_dir_path, paths)
+    root_scheme_paths, data_scheme_paths = partition(is_data_scheme_path, file_paths)
+
+    make_root_scheme_file = root_scheme_file_maker(wheel_zip, lib_dir)
+    files: Iterator[File] = map(make_root_scheme_file, root_scheme_paths)
+
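+    # Files under "<name>.data/scripts/" are treated specially below: they are
+    # wrapped in ScriptFile so their "#!python" shebangs get rewritten.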
+    def is_script_scheme_path(path: RecordPath) -> bool:
+        parts = path.split("/", 2)
+        return len(parts) > 2 and parts[0].endswith(".data") and parts[1] == "scripts"
+
+    other_scheme_paths, script_scheme_paths = partition(
+        is_script_scheme_path, data_scheme_paths
+    )
+
+    make_data_scheme_file = data_scheme_file_maker(wheel_zip, scheme)
+    other_scheme_files = map(make_data_scheme_file, other_scheme_paths)
+    files = chain(files, other_scheme_files)
+
+    # Get the defined entry points
+    distribution = get_wheel_distribution(
+        FilesystemWheel(wheel_path),
+        canonicalize_name(name),
+    )
+    console, gui = get_entrypoints(distribution)
+
+    def is_entrypoint_wrapper(file: "File") -> bool:
+        # EP, EP.exe and EP-script.py are scripts generated for
+        # entry point EP by setuptools
+        path = file.dest_path
+        name = os.path.basename(path)
+        if name.lower().endswith(".exe"):
+            matchname = name[:-4]
+        elif name.lower().endswith("-script.py"):
+            matchname = name[:-10]
+        elif name.lower().endswith(".pya"):
+            matchname = name[:-4]
+        else:
+            matchname = name
+        # Ignore setuptools-generated scripts
+        return matchname in console or matchname in gui
+
+    script_scheme_files: Iterator[File] = map(
+        make_data_scheme_file, script_scheme_paths
+    )
+    script_scheme_files = filterfalse(is_entrypoint_wrapper, script_scheme_files)
+    script_scheme_files = map(ScriptFile, script_scheme_files)
+    files = chain(files, script_scheme_files)
+
+    for file in files:
+        file.save()
+        record_installed(file.src_record_path, file.dest_path, file.changed)
+
+    def pyc_source_file_paths() -> Generator[str, None, None]:
+        # We de-duplicate installation paths, since there can be overlap (e.g.
+        # file in .data maps to same location as file in wheel root).
+        # Sorting installation paths makes it easier to reproduce and debug
+        # issues related to permissions on existing files.
+        for installed_path in sorted(set(installed.values())):
+            full_installed_path = os.path.join(lib_dir, installed_path)
+            if not os.path.isfile(full_installed_path):
+                continue
+            if not full_installed_path.endswith(".py"):
+                continue
+            yield full_installed_path
+
+    def pyc_output_path(path: str) -> str:
+        """Return the path the pyc file would have been written to."""
+        return importlib.util.cache_from_source(path)
+
+    # Compile all of the pyc files for the installed files
+    if pycompile:
+        with captured_stdout() as stdout:
+            with warnings.catch_warnings():
+                warnings.filterwarnings("ignore")
+                for path in pyc_source_file_paths():
+                    success = compileall.compile_file(path, force=True, quiet=True)
+                    if success:
+                        pyc_path = pyc_output_path(path)
+                        assert os.path.exists(pyc_path)
+                        pyc_record_path = cast(
+                            "RecordPath", pyc_path.replace(os.path.sep, "/")
+                        )
+                        record_installed(pyc_record_path, pyc_path)
+        logger.debug(stdout.getvalue())
+
+    maker = PipScriptMaker(None, scheme.scripts)
+
+    # Ensure old scripts are overwritten.
+    # See https://github.com/pypa/pip/issues/1800
+    maker.clobber = True
+
+    # Ensure we don't generate any variants for scripts because this is almost
+    # never what somebody wants.
+    # See https://bitbucket.org/pypa/distlib/issue/35/
+    maker.variants = {""}
+
+    # This is required because otherwise distlib creates scripts that are not
+    # executable.
+    # See https://bitbucket.org/pypa/distlib/issue/32/
+    maker.set_mode = True
+
+    # Generate the console and GUI entry points specified in the wheel
+    scripts_to_generate = get_console_script_specs(console)
+
+    gui_scripts_to_generate = list(starmap("{} = {}".format, gui.items()))
+
+    generated_console_scripts = maker.make_multiple(scripts_to_generate)
+    generated.extend(generated_console_scripts)
+
+    generated.extend(maker.make_multiple(gui_scripts_to_generate, {"gui": True}))
+
+    if warn_script_location:
+        msg = message_about_scripts_not_on_PATH(generated_console_scripts)
+        if msg is not None:
+            logger.warning(msg)
+
+    generated_file_mode = 0o666 & ~current_umask()
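+    # e.g. with a typical umask of 0o022 this works out to 0o644 (rw-r--r--).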
+
+    @contextlib.contextmanager
+    def _generate_file(path: str, **kwargs: Any) -> Generator[BinaryIO, None, None]:
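+        # Write to a temp file alongside the target, then atomically replace
+        # it, so a partially written INSTALLER/RECORD is never left behind.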
+        with adjacent_tmp_file(path, **kwargs) as f:
+            yield f
+        os.chmod(f.name, generated_file_mode)
+        replace(f.name, path)
+
+    dest_info_dir = os.path.join(lib_dir, info_dir)
+
+    # Record pip as the installer
+    installer_path = os.path.join(dest_info_dir, "INSTALLER")
+    with _generate_file(installer_path) as installer_file:
+        installer_file.write(b"pip\n")
+    generated.append(installer_path)
+
+    # Record the PEP 610 direct URL reference
+    if direct_url is not None:
+        direct_url_path = os.path.join(dest_info_dir, DIRECT_URL_METADATA_NAME)
+        with _generate_file(direct_url_path) as direct_url_file:
+            direct_url_file.write(direct_url.to_json().encode("utf-8"))
+        generated.append(direct_url_path)
+
+    # Record the REQUESTED file
+    if requested:
+        requested_path = os.path.join(dest_info_dir, "REQUESTED")
+        with open(requested_path, "wb"):
+            pass
+        generated.append(requested_path)
+
+    record_text = distribution.read_text("RECORD")
+    record_rows = list(csv.reader(record_text.splitlines()))
+
+    rows = get_csv_rows_for_installed(
+        record_rows,
+        installed=installed,
+        changed=changed,
+        generated=generated,
+        lib_dir=lib_dir,
+    )
+
+    # Record details of all files installed
+    record_path = os.path.join(dest_info_dir, "RECORD")
+
+    with _generate_file(record_path, **csv_io_kwargs("w")) as record_file:
+        # Explicitly cast to typing.IO[str] as a workaround for the mypy error:
+        # "writer" has incompatible type "BinaryIO"; expected "_Writer"
+        writer = csv.writer(cast("IO[str]", record_file))
+        writer.writerows(_normalized_outrows(rows))
+
+
+@contextlib.contextmanager
+def req_error_context(req_description: str) -> Generator[None, None, None]:
+    try:
+        yield
+    except InstallationError as e:
+        message = "For req: {}. {}".format(req_description, e.args[0])
+        raise InstallationError(message) from e
+
+
+def install_wheel(
+    name: str,
+    wheel_path: str,
+    scheme: Scheme,
+    req_description: str,
+    pycompile: bool = True,
+    warn_script_location: bool = True,
+    direct_url: Optional[DirectUrl] = None,
+    requested: bool = False,
+) -> None:
+    with ZipFile(wheel_path, allowZip64=True) as z:
+        with req_error_context(req_description):
+            _install_wheel(
+                name=name,
+                wheel_zip=z,
+                wheel_path=wheel_path,
+                scheme=scheme,
+                pycompile=pycompile,
+                warn_script_location=warn_script_location,
+                direct_url=direct_url,
+                requested=requested,
+            )
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/operations/prepare.py b/venv/lib/python3.9/site-packages/pip/_internal/operations/prepare.py
new file mode 100644
index 0000000..4bf414c
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/operations/prepare.py
@@ -0,0 +1,667 @@
+"""Prepares a distribution for installation
+"""
+
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+import logging
+import mimetypes
+import os
+import shutil
+from typing import Dict, Iterable, List, Optional
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.distributions import make_distribution_for_install_requirement
+from pip._internal.distributions.installed import InstalledDistribution
+from pip._internal.exceptions import (
+    DirectoryUrlHashUnsupported,
+    HashMismatch,
+    HashUnpinned,
+    InstallationError,
+    MetadataInconsistent,
+    NetworkConnectionError,
+    PreviousBuildDirError,
+    VcsHashUnsupported,
+)
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import BaseDistribution, get_metadata_distribution
+from pip._internal.models.direct_url import ArchiveInfo
+from pip._internal.models.link import Link
+from pip._internal.models.wheel import Wheel
+from pip._internal.network.download import BatchDownloader, Downloader
+from pip._internal.network.lazy_wheel import (
+    HTTPRangeRequestUnsupported,
+    dist_from_wheel_url,
+)
+from pip._internal.network.session import PipSession
+from pip._internal.operations.build.build_tracker import BuildTracker
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.utils.direct_url_helpers import (
+    direct_url_for_editable,
+    direct_url_from_link,
+)
+from pip._internal.utils.hashes import Hashes, MissingHashes
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import (
+    display_path,
+    hash_file,
+    hide_url,
+    is_installable_dir,
+)
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.unpacking import unpack_file
+from pip._internal.vcs import vcs
+
+logger = logging.getLogger(__name__)
+
+
+def _get_prepared_distribution(
+    req: InstallRequirement,
+    build_tracker: BuildTracker,
+    finder: PackageFinder,
+    build_isolation: bool,
+    check_build_deps: bool,
+) -> BaseDistribution:
+    """Prepare a distribution for installation."""
+    abstract_dist = make_distribution_for_install_requirement(req)
+    with build_tracker.track(req):
+        abstract_dist.prepare_distribution_metadata(
+            finder, build_isolation, check_build_deps
+        )
+    return abstract_dist.get_metadata_distribution()
+
+
+def unpack_vcs_link(link: Link, location: str, verbosity: int) -> None:
+    vcs_backend = vcs.get_backend_for_scheme(link.scheme)
+    assert vcs_backend is not None
+    vcs_backend.unpack(location, url=hide_url(link.url), verbosity=verbosity)
+
+
+class File:
+    def __init__(self, path: str, content_type: Optional[str]) -> None:
+        self.path = path
+        if content_type is None:
+            self.content_type = mimetypes.guess_type(path)[0]
+        else:
+            self.content_type = content_type
+
+
+def get_http_url(
+    link: Link,
+    download: Downloader,
+    download_dir: Optional[str] = None,
+    hashes: Optional[Hashes] = None,
+) -> File:
+    temp_dir = TempDirectory(kind="unpack", globally_managed=True)
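+    # globally_managed=True defers cleanup of this directory until pip exits,
+    # so the downloaded file outlives this function.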
+    # If a download dir is specified, is the file already downloaded there?
+    already_downloaded_path = None
+    if download_dir:
+        already_downloaded_path = _check_download_dir(link, download_dir, hashes)
+
+    if already_downloaded_path:
+        from_path = already_downloaded_path
+        content_type = None
+    else:
+        # let's download to a tmp dir
+        from_path, content_type = download(link, temp_dir.path)
+        if hashes:
+            hashes.check_against_path(from_path)
+
+    return File(from_path, content_type)
+
+
+def get_file_url(
+    link: Link, download_dir: Optional[str] = None, hashes: Optional[Hashes] = None
+) -> File:
+    """Get file and optionally check its hash."""
+    # If a download dir is specified, is the file already there and valid?
+    already_downloaded_path = None
+    if download_dir:
+        already_downloaded_path = _check_download_dir(link, download_dir, hashes)
+
+    if already_downloaded_path:
+        from_path = already_downloaded_path
+    else:
+        from_path = link.file_path
+
+    # If --require-hashes is off, `hashes` is either empty, the
+    # link's embedded hash, or MissingHashes; it is required to
+    # match. If --require-hashes is on, we are satisfied by any
+    # hash in `hashes` matching: a URL-based or an option-based
+    # one; no internet-sourced hash will be in `hashes`.
+    if hashes:
+        hashes.check_against_path(from_path)
+    return File(from_path, None)
+
+
+def unpack_url(
+    link: Link,
+    location: str,
+    download: Downloader,
+    verbosity: int,
+    download_dir: Optional[str] = None,
+    hashes: Optional[Hashes] = None,
+) -> Optional[File]:
+    """Unpack link into location, downloading if required.
+
+    :param hashes: A Hashes object, one of whose embedded hashes must match,
+        or HashMismatch will be raised. If the Hashes is empty, no matches are
+        required, and unhashable types of requirements (like VCS ones, which
+        would ordinarily raise HashUnsupported) are allowed.
+    """
+    # non-editable vcs urls
+    if link.is_vcs:
+        unpack_vcs_link(link, location, verbosity=verbosity)
+        return None
+
+    assert not link.is_existing_dir()
+
+    # file urls
+    if link.is_file:
+        file = get_file_url(link, download_dir, hashes=hashes)
+
+    # http urls
+    else:
+        file = get_http_url(
+            link,
+            download,
+            download_dir,
+            hashes=hashes,
+        )
+
+    # Unpack the archive to the build dir location. Even when only downloading
+    # archives, they have to be unpacked to parse dependencies, except wheels.
+    if not link.is_wheel:
+        unpack_file(file.path, location, file.content_type)
+
+    return file
+
+
+def _check_download_dir(
+    link: Link, download_dir: str, hashes: Optional[Hashes]
+) -> Optional[str]:
+    """Check download_dir for previously downloaded file with correct hash
+    If a correct file is found return its path else None
+    """
+    download_path = os.path.join(download_dir, link.filename)
+
+    if not os.path.exists(download_path):
+        return None
+
+    # If already downloaded, does its hash match?
+    logger.info("File was already downloaded %s", download_path)
+    if hashes:
+        try:
+            hashes.check_against_path(download_path)
+        except HashMismatch:
+            logger.warning(
+                "Previously-downloaded file %s has bad hash. Re-downloading.",
+                download_path,
+            )
+            os.unlink(download_path)
+            return None
+    return download_path
+
+
+class RequirementPreparer:
+    """Prepares a Requirement"""
+
+    def __init__(
+        self,
+        build_dir: str,
+        download_dir: Optional[str],
+        src_dir: str,
+        build_isolation: bool,
+        check_build_deps: bool,
+        build_tracker: BuildTracker,
+        session: PipSession,
+        progress_bar: str,
+        finder: PackageFinder,
+        require_hashes: bool,
+        use_user_site: bool,
+        lazy_wheel: bool,
+        verbosity: int,
+    ) -> None:
+        super().__init__()
+
+        self.src_dir = src_dir
+        self.build_dir = build_dir
+        self.build_tracker = build_tracker
+        self._session = session
+        self._download = Downloader(session, progress_bar)
+        self._batch_download = BatchDownloader(session, progress_bar)
+        self.finder = finder
+
+        # Where still-packed archives should be written to. If None, they are
+        # not saved, and are deleted immediately after unpacking.
+        self.download_dir = download_dir
+
+        # Is build isolation allowed?
+        self.build_isolation = build_isolation
+
+        # Should check build dependencies?
+        self.check_build_deps = check_build_deps
+
+        # Should hash-checking be required?
+        self.require_hashes = require_hashes
+
+        # Should install in user site-packages?
+        self.use_user_site = use_user_site
+
+        # Should wheels be downloaded lazily?
+        self.use_lazy_wheel = lazy_wheel
+
+        # How verbose should underlying tooling be?
+        self.verbosity = verbosity
+
+        # Memoized downloaded files, as mapping of url: path.
+        self._downloaded: Dict[str, str] = {}
+
+        # Previous "header" printed for a link-based InstallRequirement
+        self._previous_requirement_header = ("", "")
+
+    def _log_preparing_link(self, req: InstallRequirement) -> None:
+        """Provide context for the requirement being prepared."""
+        if req.link.is_file and not req.original_link_is_in_wheel_cache:
+            message = "Processing %s"
+            information = str(display_path(req.link.file_path))
+        else:
+            message = "Collecting %s"
+            information = str(req.req or req)
+
+        if (message, information) != self._previous_requirement_header:
+            self._previous_requirement_header = (message, information)
+            logger.info(message, information)
+
+        if req.original_link_is_in_wheel_cache:
+            with indent_log():
+                logger.info("Using cached %s", req.link.filename)
+
+    def _ensure_link_req_src_dir(
+        self, req: InstallRequirement, parallel_builds: bool
+    ) -> None:
+        """Ensure source_dir of a linked InstallRequirement."""
+        # Since source_dir is only set for editable requirements.
+        if req.link.is_wheel:
+            # We don't need to unpack wheels, so no need for a source
+            # directory.
+            return
+        assert req.source_dir is None
+        if req.link.is_existing_dir():
+            # build local directories in-tree
+            req.source_dir = req.link.file_path
+            return
+
+        # We always delete unpacked sdists after pip runs.
+        req.ensure_has_source_dir(
+            self.build_dir,
+            autodelete=True,
+            parallel_builds=parallel_builds,
+        )
+
+        # If a checkout exists, it's unwise to keep going. Version
+        # inconsistencies are logged later, but do not fail the
+        # installation.
+        # FIXME: this won't upgrade when there's an existing
+        # package unpacked in `req.source_dir`
+        # TODO: this check is now probably dead code
+        if is_installable_dir(req.source_dir):
+            raise PreviousBuildDirError(
+                "pip can't proceed with requirements '{}' due to a"
+                "pre-existing build directory ({}). This is likely "
+                "due to a previous installation that failed . pip is "
+                "being responsible and not assuming it can delete this. "
+                "Please delete it and try again.".format(req, req.source_dir)
+            )
+
+    def _get_linked_req_hashes(self, req: InstallRequirement) -> Hashes:
+        # By the time this is called, the requirement's link should have
+        # been checked so we can tell what kind of requirements req is
+        # and raise some more informative errors than otherwise.
+        # (For example, we can raise VcsHashUnsupported for a VCS URL
+        # rather than HashMissing.)
+        if not self.require_hashes:
+            return req.hashes(trust_internet=True)
+
+        # We could check these first 2 conditions inside unpack_url
+        # and save repetition of conditions, but then we would
+        # report less-useful error messages for unhashable
+        # requirements, complaining that there's no hash provided.
+        if req.link.is_vcs:
+            raise VcsHashUnsupported()
+        if req.link.is_existing_dir():
+            raise DirectoryUrlHashUnsupported()
+
+        # Unpinned packages are asking for trouble when a new version
+        # is uploaded.  This isn't a security check, but it saves users
+        # a surprising hash mismatch in the future.
+        # file:/// URLs aren't pinnable, so don't complain about them
+        # not being pinned.
+        if req.original_link is None and not req.is_pinned:
+            raise HashUnpinned()
+
+        # If known-good hashes are missing for this requirement,
+        # shim it with a facade object that will provoke hash
+        # computation and then raise a HashMissing exception
+        # showing the user what the hash should be.
+        return req.hashes(trust_internet=False) or MissingHashes()
+
+    def _fetch_metadata_only(
+        self,
+        req: InstallRequirement,
+    ) -> Optional[BaseDistribution]:
+        if self.require_hashes:
+            logger.debug(
+                "Metadata-only fetching is not used as hash checking is required",
+            )
+            return None
+        # Try PEP 658 metadata first, then fall back to lazy wheel if unavailable.
+        return self._fetch_metadata_using_link_data_attr(
+            req
+        ) or self._fetch_metadata_using_lazy_wheel(req.link)
+
+    def _fetch_metadata_using_link_data_attr(
+        self,
+        req: InstallRequirement,
+    ) -> Optional[BaseDistribution]:
+        """Fetch metadata from the data-dist-info-metadata attribute, if possible."""
+        # (1) Get the link to the metadata file, if provided by the backend.
+        metadata_link = req.link.metadata_link()
+        if metadata_link is None:
+            return None
+        assert req.req is not None
+        logger.info(
+            "Obtaining dependency information for %s from %s",
+            req.req,
+            metadata_link,
+        )
+        # (2) Download the contents of the METADATA file, separate from the dist itself.
+        metadata_file = get_http_url(
+            metadata_link,
+            self._download,
+            hashes=metadata_link.as_hashes(),
+        )
+        with open(metadata_file.path, "rb") as f:
+            metadata_contents = f.read()
+        # (3) Generate a dist just from those file contents.
+        metadata_dist = get_metadata_distribution(
+            metadata_contents,
+            req.link.filename,
+            req.req.name,
+        )
+        # (4) Ensure the Name: field from the METADATA file matches the name from the
+        #     install requirement.
+        #
+        #     NB: raw_name will fall back to the name from the install requirement if
+        #     the Name: field is not present, but it's noted in the raw_name docstring
+        #     that that should NEVER happen anyway.
+        if metadata_dist.raw_name != req.req.name:
+            raise MetadataInconsistent(
+                req, "Name", req.req.name, metadata_dist.raw_name
+            )
+        return metadata_dist
+
+    def _fetch_metadata_using_lazy_wheel(
+        self,
+        link: Link,
+    ) -> Optional[BaseDistribution]:
+        """Fetch metadata using lazy wheel, if possible."""
+        # --use-feature=fast-deps must be provided.
+        if not self.use_lazy_wheel:
+            return None
+        if link.is_file or not link.is_wheel:
+            logger.debug(
+                "Lazy wheel is not used as %r does not point to a remote wheel",
+                link,
+            )
+            return None
+
+        wheel = Wheel(link.filename)
+        name = canonicalize_name(wheel.name)
+        logger.info(
+            "Obtaining dependency information from %s %s",
+            name,
+            wheel.version,
+        )
+        url = link.url.split("#", 1)[0]
+        try:
+            return dist_from_wheel_url(name, url, self._session)
+        except HTTPRangeRequestUnsupported:
+            logger.debug("%s does not support range requests", url)
+            return None
+
+    def _complete_partial_requirements(
+        self,
+        partially_downloaded_reqs: Iterable[InstallRequirement],
+        parallel_builds: bool = False,
+    ) -> None:
+        """Download any requirements which were only fetched by metadata."""
+        # Download to a temporary directory. These will be copied over as
+        # needed for downstream 'download', 'wheel', and 'install' commands.
+        temp_dir = TempDirectory(kind="unpack", globally_managed=True).path
+
+        # Map each link to the requirement that owns it. This allows us to set
+        # `req.local_file_path` on the appropriate requirement after passing
+        # all the links at once into BatchDownloader.
+        links_to_fully_download: Dict[Link, InstallRequirement] = {}
+        for req in partially_downloaded_reqs:
+            assert req.link
+            links_to_fully_download[req.link] = req
+
+        batch_download = self._batch_download(
+            links_to_fully_download.keys(),
+            temp_dir,
+        )
+        for link, (filepath, _) in batch_download:
+            logger.debug("Downloading link %s to %s", link, filepath)
+            req = links_to_fully_download[link]
+            req.local_file_path = filepath
+
+        # This step is necessary to ensure all lazy wheels are processed
+        # successfully by the 'download', 'wheel', and 'install' commands.
+        for req in partially_downloaded_reqs:
+            self._prepare_linked_requirement(req, parallel_builds)
+
+    def prepare_linked_requirement(
+        self, req: InstallRequirement, parallel_builds: bool = False
+    ) -> BaseDistribution:
+        """Prepare a requirement to be obtained from req.link."""
+        assert req.link
+        self._log_preparing_link(req)
+        with indent_log():
+            # Check if the relevant file is already available
+            # in the download directory
+            file_path = None
+            if self.download_dir is not None and req.link.is_wheel:
+                hashes = self._get_linked_req_hashes(req)
+                file_path = _check_download_dir(req.link, self.download_dir, hashes)
+
+            if file_path is not None:
+                # The file is already available, so mark it as downloaded
+                self._downloaded[req.link.url] = file_path
+            else:
+                # The file is not available, attempt to fetch only metadata
+                metadata_dist = self._fetch_metadata_only(req)
+                if metadata_dist is not None:
+                    req.needs_more_preparation = True
+                    return metadata_dist
+
+            # None of the optimizations worked, fully prepare the requirement
+            return self._prepare_linked_requirement(req, parallel_builds)
+
+    def prepare_linked_requirements_more(
+        self, reqs: Iterable[InstallRequirement], parallel_builds: bool = False
+    ) -> None:
+        """Prepare linked requirements more, if needed."""
+        reqs = [req for req in reqs if req.needs_more_preparation]
+        for req in reqs:
+            # Determine if any of these requirements were already downloaded.
+            if self.download_dir is not None and req.link.is_wheel:
+                hashes = self._get_linked_req_hashes(req)
+                file_path = _check_download_dir(req.link, self.download_dir, hashes)
+                if file_path is not None:
+                    self._downloaded[req.link.url] = file_path
+                    req.needs_more_preparation = False
+
+        # Prepare requirements we found were already downloaded for some
+        # reason. The other downloads will be completed separately.
+        partially_downloaded_reqs: List[InstallRequirement] = []
+        for req in reqs:
+            if req.needs_more_preparation:
+                partially_downloaded_reqs.append(req)
+            else:
+                self._prepare_linked_requirement(req, parallel_builds)
+
+        # TODO: separate this part out from RequirementPreparer when the v1
+        # resolver can be removed!
+        self._complete_partial_requirements(
+            partially_downloaded_reqs,
+            parallel_builds=parallel_builds,
+        )
+
+    def _prepare_linked_requirement(
+        self, req: InstallRequirement, parallel_builds: bool
+    ) -> BaseDistribution:
+        assert req.link
+        link = req.link
+
+        self._ensure_link_req_src_dir(req, parallel_builds)
+        hashes = self._get_linked_req_hashes(req)
+
+        if link.is_existing_dir():
+            local_file = None
+        elif link.url not in self._downloaded:
+            try:
+                local_file = unpack_url(
+                    link,
+                    req.source_dir,
+                    self._download,
+                    self.verbosity,
+                    self.download_dir,
+                    hashes,
+                )
+            except NetworkConnectionError as exc:
+                raise InstallationError(
+                    "Could not install requirement {} because of HTTP "
+                    "error {} for URL {}".format(req, exc, link)
+                )
+        else:
+            file_path = self._downloaded[link.url]
+            if hashes:
+                hashes.check_against_path(file_path)
+            local_file = File(file_path, content_type=None)
+
+        # If download_info is set, we got it from the wheel cache.
+        if req.download_info is None:
+            # Editables don't go through this function (see
+            # prepare_editable_requirement).
+            assert not req.editable
+            req.download_info = direct_url_from_link(link, req.source_dir)
+            # Make sure we have a hash in download_info. If we got it as part of the
+            # URL, it will have been verified and we can rely on it. Otherwise we
+            # compute it from the downloaded file.
+            if (
+                isinstance(req.download_info.info, ArchiveInfo)
+                and not req.download_info.info.hash
+                and local_file
+            ):
+                hash = hash_file(local_file.path)[0].hexdigest()
+                req.download_info.info.hash = f"sha256={hash}"
+
+        # For use in later processing,
+        # preserve the file path on the requirement.
+        if local_file:
+            req.local_file_path = local_file.path
+
+        dist = _get_prepared_distribution(
+            req,
+            self.build_tracker,
+            self.finder,
+            self.build_isolation,
+            self.check_build_deps,
+        )
+        return dist
+
+    def save_linked_requirement(self, req: InstallRequirement) -> None:
+        assert self.download_dir is not None
+        assert req.link is not None
+        link = req.link
+        if link.is_vcs or (link.is_existing_dir() and req.editable):
+            # Make a .zip of the source_dir we already created.
+            req.archive(self.download_dir)
+            return
+
+        if link.is_existing_dir():
+            logger.debug(
+                "Not copying link to destination directory "
+                "since it is a directory: %s",
+                link,
+            )
+            return
+        if req.local_file_path is None:
+            # No distribution was downloaded for this requirement.
+            return
+
+        download_location = os.path.join(self.download_dir, link.filename)
+        if not os.path.exists(download_location):
+            shutil.copy(req.local_file_path, download_location)
+            download_path = display_path(download_location)
+            logger.info("Saved %s", download_path)
+
+    def prepare_editable_requirement(
+        self,
+        req: InstallRequirement,
+    ) -> BaseDistribution:
+        """Prepare an editable requirement."""
+        assert req.editable, "cannot prepare a non-editable req as editable"
+
+        logger.info("Obtaining %s", req)
+
+        with indent_log():
+            if self.require_hashes:
+                raise InstallationError(
+                    "The editable requirement {} cannot be installed when "
+                    "requiring hashes, because there is no single file to "
+                    "hash.".format(req)
+                )
+            req.ensure_has_source_dir(self.src_dir)
+            req.update_editable()
+            assert req.source_dir
+            req.download_info = direct_url_for_editable(req.unpacked_source_directory)
+
+            dist = _get_prepared_distribution(
+                req,
+                self.build_tracker,
+                self.finder,
+                self.build_isolation,
+                self.check_build_deps,
+            )
+
+            req.check_if_exists(self.use_user_site)
+
+        return dist
+
+    def prepare_installed_requirement(
+        self,
+        req: InstallRequirement,
+        skip_reason: str,
+    ) -> BaseDistribution:
+        """Prepare an already-installed requirement."""
+        assert req.satisfied_by, "req should have been satisfied but isn't"
+        assert skip_reason is not None, (
+            "did not get skip reason skipped but req.satisfied_by "
+            "is set to {}".format(req.satisfied_by)
+        )
+        logger.info(
+            "Requirement %s: %s (%s)", skip_reason, req, req.satisfied_by.version
+        )
+        with indent_log():
+            if self.require_hashes:
+                logger.debug(
+                    "Since it is already installed, we are trusting this "
+                    "package without checking its hash. To ensure a "
+                    "completely repeatable environment, install into an "
+                    "empty virtualenv."
+                )
+            return InstalledDistribution(req).get_metadata_distribution()
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/pyproject.py b/venv/lib/python3.9/site-packages/pip/_internal/pyproject.py
new file mode 100644
index 0000000..1e9119f
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/pyproject.py
@@ -0,0 +1,175 @@
+import importlib.util
+import os
+from collections import namedtuple
+from typing import Any, List, Optional
+
+from pip._vendor import tomli
+from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
+
+from pip._internal.exceptions import (
+    InstallationError,
+    InvalidPyProjectBuildRequires,
+    MissingPyProjectBuildRequires,
+)
+
+
+def _is_list_of_str(obj: Any) -> bool:
+    return isinstance(obj, list) and all(isinstance(item, str) for item in obj)
+
+
+def make_pyproject_path(unpacked_source_directory: str) -> str:
+    return os.path.join(unpacked_source_directory, "pyproject.toml")
+
+
+BuildSystemDetails = namedtuple(
+    "BuildSystemDetails", ["requires", "backend", "check", "backend_path"]
+)
+
+
+def load_pyproject_toml(
+    use_pep517: Optional[bool], pyproject_toml: str, setup_py: str, req_name: str
+) -> Optional[BuildSystemDetails]:
+    """Load the pyproject.toml file.
+
+    Parameters:
+        use_pep517 - Has the user requested PEP 517 processing? None
+                     means the user hasn't explicitly specified.
+        pyproject_toml - Location of the project's pyproject.toml file
+        setup_py - Location of the project's setup.py file
+        req_name - The name of the requirement we're processing (for
+                   error reporting)
+
+    Returns:
+        None if we should use the legacy code path, otherwise a tuple
+        (
+            requirements from pyproject.toml,
+            name of PEP 517 backend,
+            requirements we should check are installed after setting
+                up the build environment,
+            directory paths to import the backend from (backend-path),
+                relative to the project root.
+        )
+    """
+    has_pyproject = os.path.isfile(pyproject_toml)
+    has_setup = os.path.isfile(setup_py)
+
+    if not has_pyproject and not has_setup:
+        raise InstallationError(
+            f"{req_name} does not appear to be a Python project: "
+            f"neither 'setup.py' nor 'pyproject.toml' found."
+        )
+
+    if has_pyproject:
+        with open(pyproject_toml, encoding="utf-8") as f:
+            pp_toml = tomli.loads(f.read())
+        build_system = pp_toml.get("build-system")
+    else:
+        build_system = None
+
+    # The following cases must use PEP 517
+    # We check for use_pep517 being non-None and falsey because that means
+    # the user explicitly requested --no-use-pep517.  The value 0 as
+    # opposed to False can occur when the value is provided via an
+    # environment variable or config file option (due to the quirk of
+    # strtobool() returning an integer in pip's configuration code).
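+    #
+    # Illustrative summary of the decision below (a sketch, not normative):
+    #  * pyproject.toml without setup.py        -> PEP 517 is mandatory
+    #  * an explicit build-backend in the file  -> PEP 517 is mandatory
+    #  * otherwise an explicit user choice wins; failing that, PEP 517 is
+    #    used when pyproject.toml exists or setuptools cannot be imported.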
+    if has_pyproject and not has_setup:
+        if use_pep517 is not None and not use_pep517:
+            raise InstallationError(
+                "Disabling PEP 517 processing is invalid: "
+                "project does not have a setup.py"
+            )
+        use_pep517 = True
+    elif build_system and "build-backend" in build_system:
+        if use_pep517 is not None and not use_pep517:
+            raise InstallationError(
+                "Disabling PEP 517 processing is invalid: "
+                "project specifies a build backend of {} "
+                "in pyproject.toml".format(build_system["build-backend"])
+            )
+        use_pep517 = True
+
+    # If we haven't worked out whether to use PEP 517 yet,
+    # and the user hasn't explicitly stated a preference,
+    # we use PEP 517 if the project has a pyproject.toml file
+    # or if we cannot import setuptools.
+
+    # We fall back to PEP 517 when setuptools is unavailable,
+    # so setuptools can be installed as a default build backend.
+    # For more info see:
+    # https://discuss.python.org/t/pip-without-setuptools-could-the-experience-be-improved/11810/9
+    elif use_pep517 is None:
+        use_pep517 = has_pyproject or not importlib.util.find_spec("setuptools")
+
+    # At this point, we know whether we're going to use PEP 517.
+    assert use_pep517 is not None
+
+    # If we're using the legacy code path, there is nothing further
+    # for us to do here.
+    if not use_pep517:
+        return None
+
+    if build_system is None:
+        # Either the user has a pyproject.toml with no build-system
+        # section, or the user has no pyproject.toml, but has opted in
+        # explicitly via --use-pep517.
+        # In the absence of any explicit backend specification, we
+        # assume the setuptools backend that most closely emulates the
+        # traditional direct setup.py execution, and require wheel and
+        # a version of setuptools that supports that backend.
+
+        build_system = {
+            "requires": ["setuptools>=40.8.0", "wheel"],
+            "build-backend": "setuptools.build_meta:__legacy__",
+        }
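+
+        # For illustration, this default is equivalent to a project whose
+        # pyproject.toml had declared:
+        #
+        #   [build-system]
+        #   requires = ["setuptools>=40.8.0", "wheel"]
+        #   build-backend = "setuptools.build_meta:__legacy__"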
+
+    # If we're using PEP 517, we have build system information (either
+    # from pyproject.toml, or defaulted by the code above).
+    # Note that at this point, we do not know if the user has actually
+    # specified a backend, though.
+    assert build_system is not None
+
+    # Ensure that the build-system section in pyproject.toml conforms
+    # to PEP 518.
+
+    # Specifying the build-system table but not the requires key is invalid
+    if "requires" not in build_system:
+        raise MissingPyProjectBuildRequires(package=req_name)
+
+    # Error out if requires is not a list of strings
+    requires = build_system["requires"]
+    if not _is_list_of_str(requires):
+        raise InvalidPyProjectBuildRequires(
+            package=req_name,
+            reason="It is not a list of strings.",
+        )
+
+    # Each requirement must be valid as per PEP 508
+    for requirement in requires:
+        try:
+            Requirement(requirement)
+        except InvalidRequirement as error:
+            raise InvalidPyProjectBuildRequires(
+                package=req_name,
+                reason=f"It contains an invalid requirement: {requirement!r}",
+            ) from error
+
+    backend = build_system.get("build-backend")
+    backend_path = build_system.get("backend-path", [])
+    check: List[str] = []
+    if backend is None:
+        # If the user didn't specify a backend, we assume they want to use
+        # the setuptools backend. But we can't be sure they have included
+        # a version of setuptools which supplies the backend, or wheel
+        # (which is needed by the backend) in their requirements. So we
+        # make a note to check that those requirements are present once
+        # we have set up the environment.
+        # This is quite a lot of work to check for a very specific case. But
+        # the problem is, that case is potentially quite common - projects that
+        # adopted PEP 518 early for the ability to specify requirements to
+        # execute setup.py, but never considered needing to mention the build
+        # tools themselves. The original PEP 518 code had a similar check (but
+        # implemented in a different way).
+        backend = "setuptools.build_meta:__legacy__"
+        check = ["setuptools>=40.8.0", "wheel"]
+
+    return BuildSystemDetails(requires, backend, check, backend_path)
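+
+# A minimal usage sketch (the paths are hypothetical; this is how callers
+# typically consume the result of load_pyproject_toml):
+#
+#   details = load_pyproject_toml(None, "pkg/pyproject.toml", "pkg/setup.py", "pkg")
+#   if details is None:
+#       ...  # legacy setup.py code path
+#   else:
+#       requires, backend, check, backend_path = details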
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/req/__init__.py b/venv/lib/python3.9/site-packages/pip/_internal/req/__init__.py
new file mode 100644
index 0000000..8d56359
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/req/__init__.py
@@ -0,0 +1,94 @@
+import collections
+import logging
+from typing import Generator, List, Optional, Sequence, Tuple
+
+from pip._internal.utils.logging import indent_log
+
+from .req_file import parse_requirements
+from .req_install import InstallRequirement
+from .req_set import RequirementSet
+
+__all__ = [
+    "RequirementSet",
+    "InstallRequirement",
+    "parse_requirements",
+    "install_given_reqs",
+]
+
+logger = logging.getLogger(__name__)
+
+
+class InstallationResult:
+    def __init__(self, name: str) -> None:
+        self.name = name
+
+    def __repr__(self) -> str:
+        return f"InstallationResult(name={self.name!r})"
+
+
+def _validate_requirements(
+    requirements: List[InstallRequirement],
+) -> Generator[Tuple[str, InstallRequirement], None, None]:
+    for req in requirements:
+        assert req.name, f"invalid to-be-installed requirement: {req}"
+        yield req.name, req
+
+
+def install_given_reqs(
+    requirements: List[InstallRequirement],
+    install_options: List[str],
+    global_options: Sequence[str],
+    root: Optional[str],
+    home: Optional[str],
+    prefix: Optional[str],
+    warn_script_location: bool,
+    use_user_site: bool,
+    pycompile: bool,
+) -> List[InstallationResult]:
+    """
+    Install everything in the given list.
+
+    (to be called after having downloaded and unpacked the packages)
+    """
+    to_install = collections.OrderedDict(_validate_requirements(requirements))
+
+    if to_install:
+        logger.info(
+            "Installing collected packages: %s",
+            ", ".join(to_install.keys()),
+        )
+
+    installed = []
+
+    with indent_log():
+        for req_name, requirement in to_install.items():
+            if requirement.should_reinstall:
+                logger.info("Attempting uninstall: %s", req_name)
+                with indent_log():
+                    uninstalled_pathset = requirement.uninstall(auto_confirm=True)
+            else:
+                uninstalled_pathset = None
+
+            try:
+                requirement.install(
+                    install_options,
+                    global_options,
+                    root=root,
+                    home=home,
+                    prefix=prefix,
+                    warn_script_location=warn_script_location,
+                    use_user_site=use_user_site,
+                    pycompile=pycompile,
+                )
+            except Exception:
+                # if install did not succeed, rollback previous uninstall
+                if uninstalled_pathset and not requirement.install_succeeded:
+                    uninstalled_pathset.rollback()
+                raise
+            else:
+                if uninstalled_pathset and requirement.install_succeeded:
+                    uninstalled_pathset.commit()
+
+            installed.append(InstallationResult(req_name))
+
+    return installed
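+
+# A minimal, hypothetical sketch of how this function is driven (the real
+# call site lives in pip's install command and passes CLI-derived values):
+#
+#   results = install_given_reqs(
+#       requirements, install_options=[], global_options=[],
+#       root=None, home=None, prefix=None,
+#       warn_script_location=True, use_user_site=False, pycompile=True,
+#   )
+#   installed_names = [r.name for r in results]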
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/__init__.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..72c5a03
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/__init__.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/constructors.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/constructors.cpython-39.pyc
new file mode 100644
index 0000000..dcdabe5
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/constructors.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/req_file.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/req_file.cpython-39.pyc
new file mode 100644
index 0000000..544550a
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/req_file.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/req_install.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/req_install.cpython-39.pyc
new file mode 100644
index 0000000..6108755
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/req_install.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/req_set.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/req_set.cpython-39.pyc
new file mode 100644
index 0000000..8409489
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/req_set.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-39.pyc
new file mode 100644
index 0000000..18a8551
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/req/constructors.py b/venv/lib/python3.9/site-packages/pip/_internal/req/constructors.py
new file mode 100644
index 0000000..dea7c3b
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/req/constructors.py
@@ -0,0 +1,501 @@
+"""Backing implementation for InstallRequirement's various constructors
+
+The idea here is that these formed a major chunk of InstallRequirement's size,
+so moving them, and the support code dedicated to them, out of that class
+makes the rest of the code easier to understand.
+
+These are meant to be used elsewhere within pip to create instances of
+InstallRequirement.
+"""
+
+import logging
+import os
+import re
+from typing import Any, Dict, Optional, Set, Tuple, Union
+
+from pip._vendor.packaging.markers import Marker
+from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
+from pip._vendor.packaging.specifiers import Specifier
+
+from pip._internal.exceptions import InstallationError
+from pip._internal.models.index import PyPI, TestPyPI
+from pip._internal.models.link import Link
+from pip._internal.models.wheel import Wheel
+from pip._internal.req.req_file import ParsedRequirement
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.utils.filetypes import is_archive_file
+from pip._internal.utils.misc import is_installable_dir
+from pip._internal.utils.packaging import get_requirement
+from pip._internal.utils.urls import path_to_url
+from pip._internal.vcs import is_url, vcs
+
+__all__ = [
+    "install_req_from_editable",
+    "install_req_from_line",
+    "parse_editable",
+]
+
+logger = logging.getLogger(__name__)
+operators = Specifier._operators.keys()
+
+
+def _strip_extras(path: str) -> Tuple[str, Optional[str]]:
+    m = re.match(r"^(.+)(\[[^\]]+\])$", path)
+    extras = None
+    if m:
+        path_no_extras = m.group(1)
+        extras = m.group(2)
+    else:
+        path_no_extras = path
+
+    return path_no_extras, extras
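+
+# e.g. _strip_extras("./proj[dev,test]") -> ("./proj", "[dev,test]"); a plain
+# path such as "./proj" comes back unchanged with extras=None.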
+
+
+def convert_extras(extras: Optional[str]) -> Set[str]:
+    if not extras:
+        return set()
+    return get_requirement("placeholder" + extras.lower()).extras
+
+
+def parse_editable(editable_req: str) -> Tuple[Optional[str], str, Set[str]]:
+    """Parses an editable requirement into:
+        - a requirement name
+        - an URL
+        - extras
+        - editable options
+    Accepted requirements:
+        svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
+        .[some_extra]
+    """
+
+    url = editable_req
+
+    # If a file path is specified with extras, strip off the extras.
+    url_no_extras, extras = _strip_extras(url)
+
+    if os.path.isdir(url_no_extras):
+        # Treating it as code that has already been checked out
+        url_no_extras = path_to_url(url_no_extras)
+
+    if url_no_extras.lower().startswith("file:"):
+        package_name = Link(url_no_extras).egg_fragment
+        if extras:
+            return (
+                package_name,
+                url_no_extras,
+                get_requirement("placeholder" + extras.lower()).extras,
+            )
+        else:
+            return package_name, url_no_extras, set()
+
+    for version_control in vcs:
+        if url.lower().startswith(f"{version_control}:"):
+            url = f"{version_control}+{url}"
+            break
+
+    link = Link(url)
+
+    if not link.is_vcs:
+        backends = ", ".join(vcs.all_schemes)
+        raise InstallationError(
+            f"{editable_req} is not a valid editable requirement. "
+            f"It should either be a path to a local project or a VCS URL "
+            f"(beginning with {backends})."
+        )
+
+    package_name = link.egg_fragment
+    if not package_name:
+        raise InstallationError(
+            "Could not detect requirement name for '{}', please specify one "
+            "with #egg=your_package_name".format(editable_req)
+        )
+    return package_name, url, set()
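+
+# Illustrative behaviour (a sketch; the file URL depends on the checkout):
+#   parse_editable("git+https://github.com/pypa/pip#egg=pip")
+#     -> ("pip", "git+https://github.com/pypa/pip#egg=pip", set())
+#   parse_editable(".[dev]")
+#     -> (None, "file:///abs/path/to/cwd", {"dev"})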
+
+
+def check_first_requirement_in_file(filename: str) -> None:
+    """Check if file is parsable as a requirements file.
+
+    This is heavily based on ``pkg_resources.parse_requirements``, but
+    simplified to just check the first meaningful line.
+
+    :raises InvalidRequirement: If the first meaningful line cannot be parsed
+        as a requirement.
+    """
+    with open(filename, encoding="utf-8", errors="ignore") as f:
+        # Create a steppable iterator, so we can handle \-continuations.
+        lines = (
+            line
+            for line in (line.strip() for line in f)
+            if line and not line.startswith("#")  # Skip blank lines/comments.
+        )
+
+        for line in lines:
+            # Drop comments -- a hash without a space may be in a URL.
+            if " #" in line:
+                line = line[: line.find(" #")]
+            # If there is a line continuation, drop it, and append the next line.
+            if line.endswith("\\"):
+                line = line[:-2].strip() + next(lines, "")
+            Requirement(line)
+            return
+
+
+def deduce_helpful_msg(req: str) -> str:
+    """Returns helpful msg in case requirements file does not exist,
+    or cannot be parsed.
+
+    :params req: Requirements file path
+    """
+    if not os.path.exists(req):
+        return f" File '{req}' does not exist."
+    msg = " The path does exist. "
+    # Try to parse and check if it is a requirements file.
+    try:
+        check_first_requirement_in_file(req)
+    except InvalidRequirement:
+        logger.debug("Cannot parse '%s' as requirements file", req)
+    else:
+        msg += (
+            f"The argument you provided "
+            f"({req}) appears to be a"
+            f" requirements file. If that is the"
+            f" case, use the '-r' flag to install"
+            f" the packages specified within it."
+        )
+    return msg
+
+
+class RequirementParts:
+    def __init__(
+        self,
+        requirement: Optional[Requirement],
+        link: Optional[Link],
+        markers: Optional[Marker],
+        extras: Set[str],
+    ):
+        self.requirement = requirement
+        self.link = link
+        self.markers = markers
+        self.extras = extras
+
+
+def parse_req_from_editable(editable_req: str) -> RequirementParts:
+    name, url, extras_override = parse_editable(editable_req)
+
+    if name is not None:
+        try:
+            req: Optional[Requirement] = Requirement(name)
+        except InvalidRequirement:
+            raise InstallationError(f"Invalid requirement: '{name}'")
+    else:
+        req = None
+
+    link = Link(url)
+
+    return RequirementParts(req, link, None, extras_override)
+
+
+# ---- The actual constructors follow ----
+
+
+def install_req_from_editable(
+    editable_req: str,
+    comes_from: Optional[Union[InstallRequirement, str]] = None,
+    use_pep517: Optional[bool] = None,
+    isolated: bool = False,
+    options: Optional[Dict[str, Any]] = None,
+    constraint: bool = False,
+    user_supplied: bool = False,
+    permit_editable_wheels: bool = False,
+    config_settings: Optional[Dict[str, str]] = None,
+) -> InstallRequirement:
+
+    parts = parse_req_from_editable(editable_req)
+
+    return InstallRequirement(
+        parts.requirement,
+        comes_from=comes_from,
+        user_supplied=user_supplied,
+        editable=True,
+        permit_editable_wheels=permit_editable_wheels,
+        link=parts.link,
+        constraint=constraint,
+        use_pep517=use_pep517,
+        isolated=isolated,
+        install_options=options.get("install_options", []) if options else [],
+        global_options=options.get("global_options", []) if options else [],
+        hash_options=options.get("hashes", {}) if options else {},
+        config_settings=config_settings,
+        extras=parts.extras,
+    )
+
+
+def _looks_like_path(name: str) -> bool:
+    """Checks whether the string "looks like" a path on the filesystem.
+
+    This does not check whether the target actually exists; it only judges
+    from the appearance.
+
+    Returns true if any of the following conditions is true:
+    * a path separator is found (either os.path.sep or os.path.altsep);
+    * a dot is found (which represents the current directory).
+    """
+    if os.path.sep in name:
+        return True
+    if os.path.altsep is not None and os.path.altsep in name:
+        return True
+    if name.startswith("."):
+        return True
+    return False
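+
+# e.g. on POSIX, _looks_like_path("./pkg") and _looks_like_path("pkg/sub") are
+# True, while a bare name such as _looks_like_path("requests") is False.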
+
+
+def _get_url_from_path(path: str, name: str) -> Optional[str]:
+    """
+    First, check whether the provided path is an installable directory; if it
+    is, return its URL.
+
+    Otherwise, check whether the path is an archive file (such as a .whl). If
+    it is not, return None. If the archive exists on disk, return its URL; if
+    it does not, but the name contains an '@' and the part before it does not
+    look like a path, return None so the caller treats it as a PEP 440 URL
+    requirement.
+    """
+    if _looks_like_path(name) and os.path.isdir(path):
+        if is_installable_dir(path):
+            return path_to_url(path)
+        # TODO: The is_installable_dir test here might not be necessary
+        #       now that it is done in load_pyproject_toml too.
+        raise InstallationError(
+            f"Directory {name!r} is not installable. Neither 'setup.py' "
+            "nor 'pyproject.toml' found."
+        )
+    if not is_archive_file(path):
+        return None
+    if os.path.isfile(path):
+        return path_to_url(path)
+    urlreq_parts = name.split("@", 1)
+    if len(urlreq_parts) >= 2 and not _looks_like_path(urlreq_parts[0]):
+        # If the path contains '@' and the part before it does not look
+        # like a path, try to treat it as a PEP 440 URL req instead.
+        return None
+    logger.warning(
+        "Requirement %r looks like a filename, but the file does not exist",
+        name,
+    )
+    return path_to_url(path)
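+
+# e.g. (illustrative): an installable directory "./proj" yields a file:// URL,
+# "./dist/pkg-1.0.tar.gz" yields its file:// URL if the file exists, and
+# "name @ https://example.com/pkg.whl" yields None so it is handled as a
+# PEP 440 URL requirement.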
+
+
+def parse_req_from_line(name: str, line_source: Optional[str]) -> RequirementParts:
+    if is_url(name):
+        marker_sep = "; "
+    else:
+        marker_sep = ";"
+    if marker_sep in name:
+        name, markers_as_string = name.split(marker_sep, 1)
+        markers_as_string = markers_as_string.strip()
+        if not markers_as_string:
+            markers = None
+        else:
+            markers = Marker(markers_as_string)
+    else:
+        markers = None
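+    # e.g. the line 'requests>=2.0; python_version < "3.8"' splits into the
+    # specifier text and a Marker built from the environment condition.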
+    name = name.strip()
+    req_as_string = None
+    path = os.path.normpath(os.path.abspath(name))
+    link = None
+    extras_as_string = None
+
+    if is_url(name):
+        link = Link(name)
+    else:
+        p, extras_as_string = _strip_extras(path)
+        url = _get_url_from_path(p, name)
+        if url is not None:
+            link = Link(url)
+
+    # it's a local file, dir, or url
+    if link:
+        # Handle relative file URLs
+        if link.scheme == "file" and re.search(r"\.\./", link.url):
+            link = Link(path_to_url(os.path.normpath(os.path.abspath(link.path))))
+        # wheel file
+        if link.is_wheel:
+            wheel = Wheel(link.filename)  # can raise InvalidWheelFilename
+            req_as_string = f"{wheel.name}=={wheel.version}"
+        else:
+            # set the req to the egg fragment.  when it's not there, this
+            # will become an 'unnamed' requirement
+            req_as_string = link.egg_fragment
+
+    # a requirement specifier
+    else:
+        req_as_string = name
+
+    extras = convert_extras(extras_as_string)
+
+    def with_source(text: str) -> str:
+        if not line_source:
+            return text
+        return f"{text} (from {line_source})"
+
+    def _parse_req_string(req_as_string: str) -> Requirement:
+        try:
+            req = get_requirement(req_as_string)
+        except InvalidRequirement:
+            if os.path.sep in req_as_string:
+                add_msg = "It looks like a path."
+                add_msg += deduce_helpful_msg(req_as_string)
+            elif "=" in req_as_string and not any(
+                op in req_as_string for op in operators
+            ):
+                add_msg = "= is not a valid operator. Did you mean == ?"
+            else:
+                add_msg = ""
+            msg = with_source(f"Invalid requirement: {req_as_string!r}")
+            if add_msg:
+                msg += f"\nHint: {add_msg}"
+            raise InstallationError(msg)
+        else:
+            # Deprecate extras after specifiers: "name>=1.0[extras]"
+            # This currently works by accident because _strip_extras() parses
+            # any extras at the end of the string and those are saved in
+            # RequirementParts
+            for spec in req.specifier:
+                spec_str = str(spec)
+                if spec_str.endswith("]"):
+                    msg = f"Extras after version '{spec_str}'."
+                    raise InstallationError(msg)
+        return req
+
+    if req_as_string is not None:
+        req: Optional[Requirement] = _parse_req_string(req_as_string)
+    else:
+        req = None
+
+    return RequirementParts(req, link, markers, extras)
+
+
+def install_req_from_line(
+    name: str,
+    comes_from: Optional[Union[str, InstallRequirement]] = None,
+    use_pep517: Optional[bool] = None,
+    isolated: bool = False,
+    options: Optional[Dict[str, Any]] = None,
+    constraint: bool = False,
+    line_source: Optional[str] = None,
+    user_supplied: bool = False,
+    config_settings: Optional[Dict[str, str]] = None,
+) -> InstallRequirement:
+    """Creates an InstallRequirement from a name, which might be a
+    requirement, directory containing 'setup.py', filename, or URL.
+
+    :param line_source: An optional string describing where the line is from,
+        for logging purposes in case of an error.
+    """
+    parts = parse_req_from_line(name, line_source)
+
+    return InstallRequirement(
+        parts.requirement,
+        comes_from,
+        link=parts.link,
+        markers=parts.markers,
+        use_pep517=use_pep517,
+        isolated=isolated,
+        install_options=options.get("install_options", []) if options else [],
+        global_options=options.get("global_options", []) if options else [],
+        hash_options=options.get("hashes", {}) if options else {},
+        config_settings=config_settings,
+        constraint=constraint,
+        extras=parts.extras,
+        user_supplied=user_supplied,
+    )
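+
+# Illustrative inputs this constructor accepts (a sketch, not exhaustive):
+#   install_req_from_line("requests>=2.0")
+#   install_req_from_line("./downloads/numpy-1.9.2-cp34-none-win32.whl")
+#   install_req_from_line("https://example.com/pkg-1.0.tar.gz#egg=pkg")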
+
+
+def install_req_from_req_string(
+    req_string: str,
+    comes_from: Optional[InstallRequirement] = None,
+    isolated: bool = False,
+    use_pep517: Optional[bool] = None,
+    user_supplied: bool = False,
+    config_settings: Optional[Dict[str, str]] = None,
+) -> InstallRequirement:
+    try:
+        req = get_requirement(req_string)
+    except InvalidRequirement:
+        raise InstallationError(f"Invalid requirement: '{req_string}'")
+
+    domains_not_allowed = [
+        PyPI.file_storage_domain,
+        TestPyPI.file_storage_domain,
+    ]
+    if (
+        req.url
+        and comes_from
+        and comes_from.link
+        and comes_from.link.netloc in domains_not_allowed
+    ):
+        # Explicitly disallow pypi packages that depend on external urls
+        raise InstallationError(
+            "Packages installed from PyPI cannot depend on packages "
+            "which are not also hosted on PyPI.\n"
+            "{} depends on {} ".format(comes_from.name, req)
+        )
+
+    return InstallRequirement(
+        req,
+        comes_from,
+        isolated=isolated,
+        use_pep517=use_pep517,
+        user_supplied=user_supplied,
+        config_settings=config_settings,
+    )
+
+
+def install_req_from_parsed_requirement(
+    parsed_req: ParsedRequirement,
+    isolated: bool = False,
+    use_pep517: Optional[bool] = None,
+    user_supplied: bool = False,
+    config_settings: Optional[Dict[str, str]] = None,
+) -> InstallRequirement:
+    if parsed_req.is_editable:
+        req = install_req_from_editable(
+            parsed_req.requirement,
+            comes_from=parsed_req.comes_from,
+            use_pep517=use_pep517,
+            constraint=parsed_req.constraint,
+            isolated=isolated,
+            user_supplied=user_supplied,
+            config_settings=config_settings,
+        )
+
+    else:
+        req = install_req_from_line(
+            parsed_req.requirement,
+            comes_from=parsed_req.comes_from,
+            use_pep517=use_pep517,
+            isolated=isolated,
+            options=parsed_req.options,
+            constraint=parsed_req.constraint,
+            line_source=parsed_req.line_source,
+            user_supplied=user_supplied,
+            config_settings=config_settings,
+        )
+    return req
+
+
+def install_req_from_link_and_ireq(
+    link: Link, ireq: InstallRequirement
+) -> InstallRequirement:
+    return InstallRequirement(
+        req=ireq.req,
+        comes_from=ireq.comes_from,
+        editable=ireq.editable,
+        link=link,
+        markers=ireq.markers,
+        use_pep517=ireq.use_pep517,
+        isolated=ireq.isolated,
+        install_options=ireq.install_options,
+        global_options=ireq.global_options,
+        hash_options=ireq.hash_options,
+        config_settings=ireq.config_settings,
+        user_supplied=ireq.user_supplied,
+    )
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/req/req_file.py b/venv/lib/python3.9/site-packages/pip/_internal/req/req_file.py
new file mode 100644
index 0000000..11ec699
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/req/req_file.py
@@ -0,0 +1,544 @@
+"""
+Requirements file parsing
+"""
+
+import optparse
+import os
+import re
+import shlex
+import urllib.parse
+from optparse import Values
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    Dict,
+    Generator,
+    Iterable,
+    List,
+    Optional,
+    Tuple,
+)
+
+from pip._internal.cli import cmdoptions
+from pip._internal.exceptions import InstallationError, RequirementsFileParseError
+from pip._internal.models.search_scope import SearchScope
+from pip._internal.network.session import PipSession
+from pip._internal.network.utils import raise_for_status
+from pip._internal.utils.encoding import auto_decode
+from pip._internal.utils.urls import get_url_scheme
+
+if TYPE_CHECKING:
+    # NoReturn introduced in 3.6.2; imported only for type checking to maintain
+    # pip compatibility with older patch versions of Python 3.6
+    from typing import NoReturn
+
+    from pip._internal.index.package_finder import PackageFinder
+
+__all__ = ["parse_requirements"]
+
+ReqFileLines = Iterable[Tuple[int, str]]
+
+LineParser = Callable[[str], Tuple[str, Values]]
+
+SCHEME_RE = re.compile(r"^(http|https|file):", re.I)
+COMMENT_RE = re.compile(r"(^|\s+)#.*$")
+
+# Matches environment variable-style values in '${MY_VARIABLE_1}' with the
+# variable name consisting of only uppercase letters, digits or the '_'
+# (underscore). This follows the POSIX standard defined in IEEE Std 1003.1,
+# 2013 Edition.
+ENV_VAR_RE = re.compile(r"(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})")
+
+SUPPORTED_OPTIONS: List[Callable[..., optparse.Option]] = [
+    cmdoptions.index_url,
+    cmdoptions.extra_index_url,
+    cmdoptions.no_index,
+    cmdoptions.constraints,
+    cmdoptions.requirements,
+    cmdoptions.editable,
+    cmdoptions.find_links,
+    cmdoptions.no_binary,
+    cmdoptions.only_binary,
+    cmdoptions.prefer_binary,
+    cmdoptions.require_hashes,
+    cmdoptions.pre,
+    cmdoptions.trusted_host,
+    cmdoptions.use_new_feature,
+]
+
+# options to be passed to requirements
+SUPPORTED_OPTIONS_REQ: List[Callable[..., optparse.Option]] = [
+    cmdoptions.install_options,
+    cmdoptions.global_options,
+    cmdoptions.hash,
+]
+
+# the 'dest' string values
+SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ]
+
+
+class ParsedRequirement:
+    def __init__(
+        self,
+        requirement: str,
+        is_editable: bool,
+        comes_from: str,
+        constraint: bool,
+        options: Optional[Dict[str, Any]] = None,
+        line_source: Optional[str] = None,
+    ) -> None:
+        self.requirement = requirement
+        self.is_editable = is_editable
+        self.comes_from = comes_from
+        self.options = options
+        self.constraint = constraint
+        self.line_source = line_source
+
+
+class ParsedLine:
+    def __init__(
+        self,
+        filename: str,
+        lineno: int,
+        args: str,
+        opts: Values,
+        constraint: bool,
+    ) -> None:
+        self.filename = filename
+        self.lineno = lineno
+        self.opts = opts
+        self.constraint = constraint
+
+        if args:
+            self.is_requirement = True
+            self.is_editable = False
+            self.requirement = args
+        elif opts.editables:
+            self.is_requirement = True
+            self.is_editable = True
+            # We don't support multiple -e on one line
+            self.requirement = opts.editables[0]
+        else:
+            self.is_requirement = False
+
+
+def parse_requirements(
+    filename: str,
+    session: PipSession,
+    finder: Optional["PackageFinder"] = None,
+    options: Optional[optparse.Values] = None,
+    constraint: bool = False,
+) -> Generator[ParsedRequirement, None, None]:
+    """Parse a requirements file and yield ParsedRequirement instances.
+
+    :param filename:    Path or url of requirements file.
+    :param session:     PipSession instance.
+    :param finder:      Instance of pip.index.PackageFinder.
+    :param options:     cli options.
+    :param constraint:  If true, parsing a constraint file rather than
+        requirements file.
+    """
+    line_parser = get_line_parser(finder)
+    parser = RequirementsFileParser(session, line_parser)
+
+    for parsed_line in parser.parse(filename, constraint):
+        parsed_req = handle_line(
+            parsed_line, options=options, finder=finder, session=session
+        )
+        if parsed_req is not None:
+            yield parsed_req
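+
+# A minimal usage sketch (the filename here is hypothetical):
+#
+#   from pip._internal.network.session import PipSession
+#
+#   for parsed in parse_requirements("requirements.txt", PipSession()):
+#       print(parsed.requirement, parsed.is_editable, parsed.comes_from)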
+
+
+def preprocess(content: str) -> ReqFileLines:
+    """Split, filter, and join lines, and return a line iterator
+
+    :param content: the content of the requirements file
+    """
+    lines_enum: ReqFileLines = enumerate(content.splitlines(), start=1)
+    lines_enum = join_lines(lines_enum)
+    lines_enum = ignore_comments(lines_enum)
+    lines_enum = expand_env_variables(lines_enum)
+    return lines_enum
+
+
+def handle_requirement_line(
+    line: ParsedLine,
+    options: Optional[optparse.Values] = None,
+) -> ParsedRequirement:
+
+    # preserve for the nested code path
+    line_comes_from = "{} {} (line {})".format(
+        "-c" if line.constraint else "-r",
+        line.filename,
+        line.lineno,
+    )
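+    # e.g. "-r requirements.txt (line 3)" for a requirements file, or
+    # "-c constraints.txt (line 3)" when parsing a constraint file.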
+
+    assert line.is_requirement
+
+    if line.is_editable:
+        # For editable requirements, we don't support per-requirement
+        # options, so just return the parsed requirement.
+        return ParsedRequirement(
+            requirement=line.requirement,
+            is_editable=line.is_editable,
+            comes_from=line_comes_from,
+            constraint=line.constraint,
+        )
+    else:
+        # get the options that apply to requirements
+        req_options = {}
+        for dest in SUPPORTED_OPTIONS_REQ_DEST:
+            if dest in line.opts.__dict__ and line.opts.__dict__[dest]:
+                req_options[dest] = line.opts.__dict__[dest]
+
+        line_source = f"line {line.lineno} of {line.filename}"
+        return ParsedRequirement(
+            requirement=line.requirement,
+            is_editable=line.is_editable,
+            comes_from=line_comes_from,
+            constraint=line.constraint,
+            options=req_options,
+            line_source=line_source,
+        )
+
+
+def handle_option_line(
+    opts: Values,
+    filename: str,
+    lineno: int,
+    finder: Optional["PackageFinder"] = None,
+    options: Optional[optparse.Values] = None,
+    session: Optional[PipSession] = None,
+) -> None:
+
+    if options:
+        # percolate options upward
+        if opts.require_hashes:
+            options.require_hashes = opts.require_hashes
+        if opts.features_enabled:
+            options.features_enabled.extend(
+                f for f in opts.features_enabled if f not in options.features_enabled
+            )
+
+    # set finder options
+    if finder:
+        find_links = finder.find_links
+        index_urls = finder.index_urls
+        no_index = finder.search_scope.no_index
+        if opts.no_index is True:
+            no_index = True
+            index_urls = []
+        if opts.index_url and not no_index:
+            index_urls = [opts.index_url]
+        if opts.extra_index_urls and not no_index:
+            index_urls.extend(opts.extra_index_urls)
+        if opts.find_links:
+            # FIXME: it would be nice to keep track of the source
+            # of the find_links: support a find-links local path
+            # relative to a requirements file.
+            value = opts.find_links[0]
+            req_dir = os.path.dirname(os.path.abspath(filename))
+            relative_to_reqs_file = os.path.join(req_dir, value)
+            if os.path.exists(relative_to_reqs_file):
+                value = relative_to_reqs_file
+            find_links.append(value)
+
+        if session:
+            # We need to update the auth urls in session
+            session.update_index_urls(index_urls)
+
+        search_scope = SearchScope(
+            find_links=find_links,
+            index_urls=index_urls,
+            no_index=no_index,
+        )
+        finder.search_scope = search_scope
+
+        if opts.pre:
+            finder.set_allow_all_prereleases()
+
+        if opts.prefer_binary:
+            finder.set_prefer_binary()
+
+        if session:
+            for host in opts.trusted_hosts or []:
+                source = f"line {lineno} of {filename}"
+                session.add_trusted_host(host, source=source)
+
+
+def handle_line(
+    line: ParsedLine,
+    options: Optional[optparse.Values] = None,
+    finder: Optional["PackageFinder"] = None,
+    session: Optional[PipSession] = None,
+) -> Optional[ParsedRequirement]:
+    """Handle a single parsed requirements line; This can result in
+    creating/yielding requirements, or updating the finder.
+
+    :param line:        The parsed line to be processed.
+    :param options:     CLI options.
+    :param finder:      The finder - updated by non-requirement lines.
+    :param session:     The session - updated by non-requirement lines.
+
+    Returns a ParsedRequirement object if the line is a requirement line,
+    otherwise returns None.
+
+    For lines that contain requirements, the only options that have an effect
+    are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
+    requirement. Other options from SUPPORTED_OPTIONS may be present, but are
+    ignored.
+
+    For lines that do not contain requirements, the only options that have an
+    effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
+    be present, but are ignored. These lines may contain multiple options
+    (although our docs imply only one is supported), and all are parsed and
+    affect the finder.
+    """
+
+    if line.is_requirement:
+        parsed_req = handle_requirement_line(line, options)
+        return parsed_req
+    else:
+        handle_option_line(
+            line.opts,
+            line.filename,
+            line.lineno,
+            finder,
+            options,
+            session,
+        )
+        return None
+
+
+class RequirementsFileParser:
+    def __init__(
+        self,
+        session: PipSession,
+        line_parser: LineParser,
+    ) -> None:
+        self._session = session
+        self._line_parser = line_parser
+
+    def parse(
+        self, filename: str, constraint: bool
+    ) -> Generator[ParsedLine, None, None]:
+        """Parse a given file, yielding parsed lines."""
+        yield from self._parse_and_recurse(filename, constraint)
+
+    def _parse_and_recurse(
+        self, filename: str, constraint: bool
+    ) -> Generator[ParsedLine, None, None]:
+        for line in self._parse_file(filename, constraint):
+            if not line.is_requirement and (
+                line.opts.requirements or line.opts.constraints
+            ):
+                # parse a nested requirements file
+                if line.opts.requirements:
+                    req_path = line.opts.requirements[0]
+                    nested_constraint = False
+                else:
+                    req_path = line.opts.constraints[0]
+                    nested_constraint = True
+
+                # original file is over http
+                if SCHEME_RE.search(filename):
+                    # do a url join so relative paths work
+                    req_path = urllib.parse.urljoin(filename, req_path)
+                # original file and nested file are paths
+                elif not SCHEME_RE.search(req_path):
+                    # do a join so relative paths work
+                    req_path = os.path.join(
+                        os.path.dirname(filename),
+                        req_path,
+                    )
+
+                yield from self._parse_and_recurse(req_path, nested_constraint)
+            else:
+                yield line
+
+    def _parse_file(
+        self, filename: str, constraint: bool
+    ) -> Generator[ParsedLine, None, None]:
+        _, content = get_file_content(filename, self._session)
+
+        lines_enum = preprocess(content)
+
+        for line_number, line in lines_enum:
+            try:
+                args_str, opts = self._line_parser(line)
+            except OptionParsingError as e:
+                # add offending line
+                msg = f"Invalid requirement: {line}\n{e.msg}"
+                raise RequirementsFileParseError(msg)
+
+            yield ParsedLine(
+                filename,
+                line_number,
+                args_str,
+                opts,
+                constraint,
+            )
+
+
+def get_line_parser(finder: Optional["PackageFinder"]) -> LineParser:
+    def parse_line(line: str) -> Tuple[str, Values]:
+        # Build new parser for each line since it accumulates appendable
+        # options.
+        parser = build_parser()
+        defaults = parser.get_default_values()
+        defaults.index_url = None
+        if finder:
+            defaults.format_control = finder.format_control
+
+        args_str, options_str = break_args_options(line)
+
+        try:
+            options = shlex.split(options_str)
+        except ValueError as e:
+            raise OptionParsingError(f"Could not split options: {options_str}") from e
+
+        opts, _ = parser.parse_args(options, defaults)
+
+        return args_str, opts
+
+    return parse_line
+
+
+def break_args_options(line: str) -> Tuple[str, str]:
+    """Break up the line into an args and options string.  We only want to shlex
+    (and then optparse) the options, not the args.  args can contain markers
+    which are corrupted by shlex.
+    """
+    tokens = line.split(" ")
+    args = []
+    options = tokens[:]
+    for token in tokens:
+        if token.startswith("-") or token.startswith("--"):
+            break
+        else:
+            args.append(token)
+            options.pop(0)
+    return " ".join(args), " ".join(options)
+
+
+class OptionParsingError(Exception):
+    def __init__(self, msg: str) -> None:
+        self.msg = msg
+
+
+def build_parser() -> optparse.OptionParser:
+    """
+    Return a parser for parsing requirement lines
+    """
+    parser = optparse.OptionParser(add_help_option=False)
+
+    option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ
+    for option_factory in option_factories:
+        option = option_factory()
+        parser.add_option(option)
+
+    # By default optparse sys.exits on parsing errors. We want to wrap
+    # that in our own exception.
+    def parser_exit(self: Any, msg: str) -> "NoReturn":
+        raise OptionParsingError(msg)
+
+    # NOTE: mypy disallows assigning to a method
+    #       https://github.com/python/mypy/issues/2427
+    parser.exit = parser_exit  # type: ignore
+
+    return parser
+
+
+def join_lines(lines_enum: ReqFileLines) -> ReqFileLines:
+    """Joins a line ending in '\' with the previous line (except when following
+    comments).  The joined line takes on the index of the first line.
+    """
+    primary_line_number = None
+    new_line: List[str] = []
+    for line_number, line in lines_enum:
+        if not line.endswith("\\") or COMMENT_RE.match(line):
+            if COMMENT_RE.match(line):
+                # this ensures comments are always matched later
+                line = " " + line
+            if new_line:
+                new_line.append(line)
+                assert primary_line_number is not None
+                yield primary_line_number, "".join(new_line)
+                new_line = []
+            else:
+                yield line_number, line
+        else:
+            if not new_line:
+                primary_line_number = line_number
+            new_line.append(line.strip("\\"))
+
+    # last line contains \
+    if new_line:
+        assert primary_line_number is not None
+        yield primary_line_number, "".join(new_line)
+
+    # TODO: handle space after '\'.
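+
+# e.g. the physical lines
+#   requests \
+#   >=2.0
+# are yielded as the single logical line "requests >=2.0", carrying the line
+# number of the first physical line.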
+
+
+def ignore_comments(lines_enum: ReqFileLines) -> ReqFileLines:
+    """
+    Strip comments and filter out empty lines.
+    """
+    for line_number, line in lines_enum:
+        line = COMMENT_RE.sub("", line)
+        line = line.strip()
+        if line:
+            yield line_number, line
+
+
+def expand_env_variables(lines_enum: ReqFileLines) -> ReqFileLines:
+    """Replace all environment variables that can be retrieved via `os.getenv`.
+
+    The only allowed format for environment variables defined in the
+    requirement file is `${MY_VARIABLE_1}` to ensure two things:
+
+    1. Strings that contain a `$` aren't accidentally (partially) expanded.
+    2. Requirement files behave consistently across platforms.
+
+    These points are the result of a discussion on the `github pull
+    request #3514 <https://github.com/pypa/pip/pull/3514>`_.
+
+    Valid characters in variable names follow the `POSIX standard
+    <http://pubs.opengroup.org/onlinepubs/9699919799/>`_ and are limited
+    to uppercase letter, digits and the `_` (underscore).
+    """
+    for line_number, line in lines_enum:
+        for env_var, var_name in ENV_VAR_RE.findall(line):
+            value = os.getenv(var_name)
+            if not value:
+                continue
+
+            line = line.replace(env_var, value)
+
+        yield line_number, line
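+
+# e.g. with API_TOKEN=secret in the environment (names illustrative), the line
+#   https://${API_TOKEN}@example.com/simple
+# is rewritten to https://secret@example.com/simple; unset variables are left
+# verbatim.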
+
+
+def get_file_content(url: str, session: PipSession) -> Tuple[str, str]:
+    """Gets the content of a file; it may be a filename, file: URL, or
+    http: URL.  Returns (location, content).  Content is unicode.
+    Respects # -*- coding: declarations on the retrieved files.
+
+    :param url:         File path or url.
+    :param session:     PipSession instance.
+    """
+    scheme = get_url_scheme(url)
+
+    # Pip has special support for file:// URLs (LocalFSAdapter).
+    if scheme in ["http", "https", "file"]:
+        resp = session.get(url)
+        raise_for_status(resp)
+        return resp.url, resp.text
+
+    # Assume this is a bare path.
+    try:
+        with open(url, "rb") as f:
+            content = auto_decode(f.read())
+    except OSError as exc:
+        raise InstallationError(f"Could not open requirements file: {exc}")
+    return url, content
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/req/req_install.py b/venv/lib/python3.9/site-packages/pip/_internal/req/req_install.py
new file mode 100644
index 0000000..5f29261
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/req/req_install.py
@@ -0,0 +1,942 @@
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+import functools
+import logging
+import os
+import shutil
+import sys
+import uuid
+import zipfile
+from enum import Enum
+from optparse import Values
+from typing import Any, Collection, Dict, Iterable, List, Optional, Sequence, Union
+
+from pip._vendor.packaging.markers import Marker
+from pip._vendor.packaging.requirements import Requirement
+from pip._vendor.packaging.specifiers import SpecifierSet
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.packaging.version import Version
+from pip._vendor.packaging.version import parse as parse_version
+from pip._vendor.pep517.wrappers import Pep517HookCaller
+
+from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment
+from pip._internal.exceptions import InstallationError, LegacyInstallFailure
+from pip._internal.locations import get_scheme
+from pip._internal.metadata import (
+    BaseDistribution,
+    get_default_environment,
+    get_directory_distribution,
+    get_wheel_distribution,
+)
+from pip._internal.metadata.base import FilesystemWheel
+from pip._internal.models.direct_url import DirectUrl
+from pip._internal.models.link import Link
+from pip._internal.operations.build.metadata import generate_metadata
+from pip._internal.operations.build.metadata_editable import generate_editable_metadata
+from pip._internal.operations.build.metadata_legacy import (
+    generate_metadata as generate_metadata_legacy,
+)
+from pip._internal.operations.install.editable_legacy import (
+    install_editable as install_editable_legacy,
+)
+from pip._internal.operations.install.legacy import install as install_legacy
+from pip._internal.operations.install.wheel import install_wheel
+from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path
+from pip._internal.req.req_uninstall import UninstallPathSet
+from pip._internal.utils.deprecation import LegacyInstallReason, deprecated
+from pip._internal.utils.direct_url_helpers import (
+    direct_url_for_editable,
+    direct_url_from_link,
+)
+from pip._internal.utils.hashes import Hashes
+from pip._internal.utils.misc import (
+    ConfiguredPep517HookCaller,
+    ask_path_exists,
+    backup_dir,
+    display_path,
+    hide_url,
+    redact_auth_from_url,
+)
+from pip._internal.utils.packaging import safe_extra
+from pip._internal.utils.subprocess import runner_with_spinner_message
+from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
+from pip._internal.utils.virtualenv import running_under_virtualenv
+from pip._internal.vcs import vcs
+
+logger = logging.getLogger(__name__)
+
+
+class InstallRequirement:
+    """
+    Represents something that may be installed later on. It may carry
+    information about where to fetch the relevant requirement and also
+    contains the logic for installing it.
+    """
+
+    def __init__(
+        self,
+        req: Optional[Requirement],
+        comes_from: Optional[Union[str, "InstallRequirement"]],
+        editable: bool = False,
+        link: Optional[Link] = None,
+        markers: Optional[Marker] = None,
+        use_pep517: Optional[bool] = None,
+        isolated: bool = False,
+        install_options: Optional[List[str]] = None,
+        global_options: Optional[List[str]] = None,
+        hash_options: Optional[Dict[str, List[str]]] = None,
+        config_settings: Optional[Dict[str, str]] = None,
+        constraint: bool = False,
+        extras: Collection[str] = (),
+        user_supplied: bool = False,
+        permit_editable_wheels: bool = False,
+    ) -> None:
+        assert req is None or isinstance(req, Requirement), req
+        self.req = req
+        self.comes_from = comes_from
+        self.constraint = constraint
+        self.editable = editable
+        self.permit_editable_wheels = permit_editable_wheels
+        self.legacy_install_reason: Optional[LegacyInstallReason] = None
+
+        # source_dir is the local directory where the linked requirement is
+        # located, or unpacked. In case unpacking is needed, creating and
+        # populating source_dir is done by the RequirementPreparer. Note this
+        # is not necessarily the directory where pyproject.toml or setup.py is
+        # located - that one is obtained via unpacked_source_directory.
+        self.source_dir: Optional[str] = None
+        if self.editable:
+            assert link
+            if link.is_file:
+                self.source_dir = os.path.normpath(os.path.abspath(link.file_path))
+
+        if link is None and req and req.url:
+            # PEP 508 URL requirement
+            link = Link(req.url)
+        self.link = self.original_link = link
+        self.original_link_is_in_wheel_cache = False
+
+        # Information about the location of the artifact that was downloaded. This
+        # property is guaranteed to be set in resolver results.
+        self.download_info: Optional[DirectUrl] = None
+
+        # Path to any downloaded or already-existing package.
+        self.local_file_path: Optional[str] = None
+        if self.link and self.link.is_file:
+            self.local_file_path = self.link.file_path
+
+        if extras:
+            self.extras = extras
+        elif req:
+            self.extras = {safe_extra(extra) for extra in req.extras}
+        else:
+            self.extras = set()
+        if markers is None and req:
+            markers = req.marker
+        self.markers = markers
+
+        # This holds the Distribution object if this requirement is already installed.
+        self.satisfied_by: Optional[BaseDistribution] = None
+        # Whether the installation process should try to uninstall an existing
+        # distribution before installing this requirement.
+        self.should_reinstall = False
+        # Temporary build location
+        self._temp_build_dir: Optional[TempDirectory] = None
+        # Set to True after successful installation
+        self.install_succeeded: Optional[bool] = None
+        # Supplied options
+        self.install_options = install_options if install_options else []
+        self.global_options = global_options if global_options else []
+        self.hash_options = hash_options if hash_options else {}
+        self.config_settings = config_settings
+        # Set to True after successful preparation of this requirement
+        self.prepared = False
+        # User-supplied requirements are explicitly requested for installation
+        # by the user via CLI arguments or requirements files, as opposed to,
+        # e.g., dependencies, extras or constraints.
+        self.user_supplied = user_supplied
+
+        self.isolated = isolated
+        self.build_env: BuildEnvironment = NoOpBuildEnvironment()
+
+        # For PEP 517, the directory where we request the project metadata
+        # gets stored. We need this to pass to build_wheel, so the backend
+        # can ensure that the wheel matches the metadata (see the PEP for
+        # details).
+        self.metadata_directory: Optional[str] = None
+
+        # The static build requirements (from pyproject.toml)
+        self.pyproject_requires: Optional[List[str]] = None
+
+        # Build requirements that we will check are available
+        self.requirements_to_check: List[str] = []
+
+        # The PEP 517 backend we should use to build the project
+        self.pep517_backend: Optional[Pep517HookCaller] = None
+
+        # Are we using PEP 517 for this requirement?
+        # After pyproject.toml has been loaded, the only valid values are True
+        # and False. Before loading, None is valid (meaning "use the default").
+        # Setting an explicit value before loading pyproject.toml is supported,
+        # but after loading this flag should be treated as read only.
+        self.use_pep517 = use_pep517
+
+        # This requirement needs more preparation before it can be built
+        self.needs_more_preparation = False
+
+    def __str__(self) -> str:
+        if self.req:
+            s = str(self.req)
+            if self.link:
+                s += " from {}".format(redact_auth_from_url(self.link.url))
+        elif self.link:
+            s = redact_auth_from_url(self.link.url)
+        else:
+            s = "<InstallRequirement>"
+        if self.satisfied_by is not None:
+            s += " in {}".format(display_path(self.satisfied_by.location))
+        if self.comes_from:
+            if isinstance(self.comes_from, str):
+                comes_from: Optional[str] = self.comes_from
+            else:
+                comes_from = self.comes_from.from_path()
+            if comes_from:
+                s += f" (from {comes_from})"
+        return s
+
+    def __repr__(self) -> str:
+        return "<{} object: {} editable={!r}>".format(
+            self.__class__.__name__, str(self), self.editable
+        )
+
+    def format_debug(self) -> str:
+        """An un-tested helper for getting state, for debugging."""
+        attributes = vars(self)
+        names = sorted(attributes)
+
+        state = ("{}={!r}".format(attr, attributes[attr]) for attr in sorted(names))
+        return "<{name} object: {{{state}}}>".format(
+            name=self.__class__.__name__,
+            state=", ".join(state),
+        )
+
+    # Things that are valid for all kinds of requirements
+    @property
+    def name(self) -> Optional[str]:
+        if self.req is None:
+            return None
+        return self.req.name
+
+    @functools.lru_cache()  # use cached_property in python 3.8+
+    def supports_pyproject_editable(self) -> bool:
+        if not self.use_pep517:
+            return False
+        assert self.pep517_backend
+        with self.build_env:
+            runner = runner_with_spinner_message(
+                "Checking if build backend supports build_editable"
+            )
+            with self.pep517_backend.subprocess_runner(runner):
+                return "build_editable" in self.pep517_backend._supported_features()
+
+    @property
+    def specifier(self) -> SpecifierSet:
+        return self.req.specifier
+
+    @property
+    def is_pinned(self) -> bool:
+        """Return whether I am pinned to an exact version.
+
+        For example, some-package==1.2 is pinned; some-package>1.2 is not.
+        """
+        specifiers = self.specifier
+        return len(specifiers) == 1 and next(iter(specifiers)).operator in {"==", "==="}
+
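+    # A rough sketch of the pinning check above (assuming only the vendored
+    # ``packaging`` API; the specifier strings are made up):
+    #
+    #   >>> from pip._vendor.packaging.specifiers import SpecifierSet
+    #   >>> spec = SpecifierSet("==1.2")
+    #   >>> len(spec) == 1 and next(iter(spec)).operator in {"==", "==="}
+    #   True
+    #   >>> next(iter(SpecifierSet(">1.2"))).operator
+    #   '>'
+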
+    def match_markers(self, extras_requested: Optional[Iterable[str]] = None) -> bool:
+        if not extras_requested:
+            # Provide an extra to safely evaluate the markers
+            # without matching any extra
+            extras_requested = ("",)
+        if self.markers is not None:
+            return any(
+                self.markers.evaluate({"extra": extra}) for extra in extras_requested
+            )
+        else:
+            return True
+
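+    # A rough sketch of marker evaluation with the synthetic empty extra
+    # (the marker string is made up; the API is the vendored ``packaging``):
+    #
+    #   >>> from pip._vendor.packaging.markers import Marker
+    #   >>> m = Marker('extra == "socks"')
+    #   >>> m.evaluate({"extra": "socks"})
+    #   True
+    #   >>> m.evaluate({"extra": ""})  # the safe default used above
+    #   False
+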
+    @property
+    def has_hash_options(self) -> bool:
+        """Return whether any known-good hashes are specified as options.
+
+        These activate --require-hashes mode; hashes specified as part of a
+        URL do not.
+
+        """
+        return bool(self.hash_options)
+
+    def hashes(self, trust_internet: bool = True) -> Hashes:
+        """Return a hash-comparer that considers my option- and URL-based
+        hashes to be known-good.
+
+        Hashes in URLs--ones embedded in the requirements file, not ones
+        downloaded from an index server--are almost peers with ones from
+        flags. They satisfy --require-hashes (whether it was implicitly or
+        explicitly activated) but do not activate it. md5 and sha224 are not
+        allowed in flags, which should nudge people toward good algos. We
+        always OR all hashes together, even ones from URLs.
+
+        :param trust_internet: Whether to trust URL-based (#md5=...) hashes
+            downloaded from the internet, as by populate_link()
+
+        """
+        good_hashes = self.hash_options.copy()
+        link = self.link if trust_internet else self.original_link
+        if link and link.hash:
+            good_hashes.setdefault(link.hash_name, []).append(link.hash)
+        return Hashes(good_hashes)
+
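+    # A rough sketch of the merging above (digest values are made up):
+    #
+    #   >>> good_hashes = {"sha256": ["aaa"]}                   # from options
+    #   >>> good_hashes.setdefault("sha256", []).append("bbb")  # from #sha256=bbb
+    #   >>> good_hashes
+    #   {'sha256': ['aaa', 'bbb']}
+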
+    def from_path(self) -> Optional[str]:
+        """Format a nice indicator to show where this "comes from" """
+        if self.req is None:
+            return None
+        s = str(self.req)
+        if self.comes_from:
+            if isinstance(self.comes_from, str):
+                comes_from = self.comes_from
+            else:
+                comes_from = self.comes_from.from_path()
+            if comes_from:
+                s += "->" + comes_from
+        return s
+
+    def ensure_build_location(
+        self, build_dir: str, autodelete: bool, parallel_builds: bool
+    ) -> str:
+        assert build_dir is not None
+        if self._temp_build_dir is not None:
+            assert self._temp_build_dir.path
+            return self._temp_build_dir.path
+        if self.req is None:
+            # Some systems have /tmp as a symlink which confuses custom
+            # builds (such as numpy). Thus, we ensure that the real path
+            # is returned.
+            self._temp_build_dir = TempDirectory(
+                kind=tempdir_kinds.REQ_BUILD, globally_managed=True
+            )
+
+            return self._temp_build_dir.path
+
+        # This is the only remaining place where we manually determine the path
+        # for the temporary directory. It is only needed for editables where
+        # it is the value of the --src option.
+
+        # When parallel builds are enabled, add a UUID to the build directory
+        # name so multiple builds do not interfere with each other.
+        dir_name: str = canonicalize_name(self.name)
+        if parallel_builds:
+            dir_name = f"{dir_name}_{uuid.uuid4().hex}"
+
+        # FIXME: Is there a better place to create the build_dir? (hg and bzr
+        # need this)
+        if not os.path.exists(build_dir):
+            logger.debug("Creating directory %s", build_dir)
+            os.makedirs(build_dir)
+        actual_build_dir = os.path.join(build_dir, dir_name)
+        # `None` indicates that we respect the globally-configured deletion
+        # settings, which is what we actually want when auto-deleting.
+        delete_arg = None if autodelete else False
+        return TempDirectory(
+            path=actual_build_dir,
+            delete=delete_arg,
+            kind=tempdir_kinds.REQ_BUILD,
+            globally_managed=True,
+        ).path
+
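+    # A rough sketch of the directory naming above (package name made up):
+    #
+    #   >>> import uuid
+    #   >>> from pip._vendor.packaging.utils import canonicalize_name
+    #   >>> dir_name = canonicalize_name("My_Package")
+    #   >>> str(dir_name)
+    #   'my-package'
+    #   >>> f"{dir_name}_{uuid.uuid4().hex}"  # with parallel_builds
+    #   'my-package_<32 hex digits>'
+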
+    def _set_requirement(self) -> None:
+        """Set requirement after generating metadata."""
+        assert self.req is None
+        assert self.metadata is not None
+        assert self.source_dir is not None
+
+        # Construct a Requirement object from the generated metadata
+        if isinstance(parse_version(self.metadata["Version"]), Version):
+            op = "=="
+        else:
+            op = "==="
+
+        self.req = Requirement(
+            "".join(
+                [
+                    self.metadata["Name"],
+                    op,
+                    self.metadata["Version"],
+                ]
+            )
+        )
+
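+    # A rough sketch of the operator choice above: with the vendored
+    # ``packaging`` (pre-22), non-PEP-440 versions parse as LegacyVersion,
+    # not ``Version`` (version strings are made up):
+    #
+    #   >>> from pip._vendor.packaging.version import Version, parse
+    #   >>> isinstance(parse("1.2.3"), Version)          # -> op "=="
+    #   True
+    #   >>> isinstance(parse("not a version"), Version)  # -> op "==="
+    #   False
+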
+    def warn_on_mismatching_name(self) -> None:
+        metadata_name = canonicalize_name(self.metadata["Name"])
+        if canonicalize_name(self.req.name) == metadata_name:
+            # Everything is fine.
+            return
+
+        # If we're here, there's a mismatch. Log a warning about it.
+        logger.warning(
+            "Generating metadata for package %s "
+            "produced metadata for project name %s. Fix your "
+            "#egg=%s fragments.",
+            self.name,
+            metadata_name,
+            self.name,
+        )
+        self.req = Requirement(metadata_name)
+
+    def check_if_exists(self, use_user_site: bool) -> None:
+        """Find an installed distribution that satisfies or conflicts
+        with this requirement, and set self.satisfied_by or
+        self.should_reinstall appropriately.
+        """
+        if self.req is None:
+            return
+        existing_dist = get_default_environment().get_distribution(self.req.name)
+        if not existing_dist:
+            return
+
+        version_compatible = self.req.specifier.contains(
+            existing_dist.version,
+            prereleases=True,
+        )
+        if not version_compatible:
+            self.satisfied_by = None
+            if use_user_site:
+                if existing_dist.in_usersite:
+                    self.should_reinstall = True
+                elif running_under_virtualenv() and existing_dist.in_site_packages:
+                    raise InstallationError(
+                        f"Will not install to the user site because it will "
+                        f"lack sys.path precedence to {existing_dist.raw_name} "
+                        f"in {existing_dist.location}"
+                    )
+            else:
+                self.should_reinstall = True
+        else:
+            if self.editable:
+                self.should_reinstall = True
+                # when installing editables, nothing pre-existing should ever
+                # satisfy
+                self.satisfied_by = None
+            else:
+                self.satisfied_by = existing_dist
+
+    # Things valid for wheels
+    @property
+    def is_wheel(self) -> bool:
+        if not self.link:
+            return False
+        return self.link.is_wheel
+
+    # Things valid for sdists
+    @property
+    def unpacked_source_directory(self) -> str:
+        return os.path.join(
+            self.source_dir, self.link and self.link.subdirectory_fragment or ""
+        )
+
+    @property
+    def setup_py_path(self) -> str:
+        assert self.source_dir, f"No source dir for {self}"
+        setup_py = os.path.join(self.unpacked_source_directory, "setup.py")
+
+        return setup_py
+
+    @property
+    def setup_cfg_path(self) -> str:
+        assert self.source_dir, f"No source dir for {self}"
+        setup_cfg = os.path.join(self.unpacked_source_directory, "setup.cfg")
+
+        return setup_cfg
+
+    @property
+    def pyproject_toml_path(self) -> str:
+        assert self.source_dir, f"No source dir for {self}"
+        return make_pyproject_path(self.unpacked_source_directory)
+
+    def load_pyproject_toml(self) -> None:
+        """Load the pyproject.toml file.
+
+        After calling this routine, all of the attributes related to PEP 517
+        processing for this requirement have been set. In particular, the
+        use_pep517 attribute can be used to determine whether we should
+        follow the PEP 517 or legacy (setup.py) code path.
+        """
+        pyproject_toml_data = load_pyproject_toml(
+            self.use_pep517, self.pyproject_toml_path, self.setup_py_path, str(self)
+        )
+
+        if pyproject_toml_data is None:
+            self.use_pep517 = False
+            return
+
+        self.use_pep517 = True
+        requires, backend, check, backend_path = pyproject_toml_data
+        self.requirements_to_check = check
+        self.pyproject_requires = requires
+        self.pep517_backend = ConfiguredPep517HookCaller(
+            self,
+            self.unpacked_source_directory,
+            backend,
+            backend_path=backend_path,
+        )
+
+    def isolated_editable_sanity_check(self) -> None:
+        """Check that an editable requirement if valid for use with PEP 517/518.
+
+        This verifies that an editable that has a pyproject.toml either supports PEP 660
+        or as a setup.py or a setup.cfg
+        """
+        if (
+            self.editable
+            and self.use_pep517
+            and not self.supports_pyproject_editable()
+            and not os.path.isfile(self.setup_py_path)
+            and not os.path.isfile(self.setup_cfg_path)
+        ):
+            raise InstallationError(
+                f"Project {self} has a 'pyproject.toml' and its build "
+                f"backend is missing the 'build_editable' hook. Since it does not "
+                f"have a 'setup.py' nor a 'setup.cfg', "
+                f"it cannot be installed in editable mode. "
+                f"Consider using a build backend that supports PEP 660."
+            )
+
+    def prepare_metadata(self) -> None:
+        """Ensure that project metadata is available.
+
+        Under PEP 517 and PEP 660, call the backend hook to prepare the metadata.
+        Under legacy processing, call setup.py egg-info.
+        """
+        assert self.source_dir
+        details = self.name or f"from {self.link}"
+
+        if self.use_pep517:
+            assert self.pep517_backend is not None
+            if (
+                self.editable
+                and self.permit_editable_wheels
+                and self.supports_pyproject_editable()
+            ):
+                self.metadata_directory = generate_editable_metadata(
+                    build_env=self.build_env,
+                    backend=self.pep517_backend,
+                    details=details,
+                )
+            else:
+                self.metadata_directory = generate_metadata(
+                    build_env=self.build_env,
+                    backend=self.pep517_backend,
+                    details=details,
+                )
+        else:
+            self.metadata_directory = generate_metadata_legacy(
+                build_env=self.build_env,
+                setup_py_path=self.setup_py_path,
+                source_dir=self.unpacked_source_directory,
+                isolated=self.isolated,
+                details=details,
+            )
+
+        # Act on the newly generated metadata, based on the name and version.
+        if not self.name:
+            self._set_requirement()
+        else:
+            self.warn_on_mismatching_name()
+
+        self.assert_source_matches_version()
+
+    @property
+    def metadata(self) -> Any:
+        if not hasattr(self, "_metadata"):
+            self._metadata = self.get_dist().metadata
+
+        return self._metadata
+
+    def get_dist(self) -> BaseDistribution:
+        if self.metadata_directory:
+            return get_directory_distribution(self.metadata_directory)
+        elif self.local_file_path and self.is_wheel:
+            return get_wheel_distribution(
+                FilesystemWheel(self.local_file_path), canonicalize_name(self.name)
+            )
+        raise AssertionError(
+            f"InstallRequirement {self} has no metadata directory and no wheel: "
+            f"can't make a distribution."
+        )
+
+    def assert_source_matches_version(self) -> None:
+        assert self.source_dir
+        version = self.metadata["version"]
+        if self.req.specifier and version not in self.req.specifier:
+            logger.warning(
+                "Requested %s, but installing version %s",
+                self,
+                version,
+            )
+        else:
+            logger.debug(
+                "Source in %s has version %s, which satisfies requirement %s",
+                display_path(self.source_dir),
+                version,
+                self,
+            )
+
+    # For both source distributions and editables
+    def ensure_has_source_dir(
+        self,
+        parent_dir: str,
+        autodelete: bool = False,
+        parallel_builds: bool = False,
+    ) -> None:
+        """Ensure that a source_dir is set.
+
+        This will create a temporary build dir if the name of the requirement
+        isn't known yet.
+
+        :param parent_dir: The ideal pip parent_dir for the source_dir.
+            Generally src_dir for editables and build_dir for sdists.
+        :return: self.source_dir
+        """
+        if self.source_dir is None:
+            self.source_dir = self.ensure_build_location(
+                parent_dir,
+                autodelete=autodelete,
+                parallel_builds=parallel_builds,
+            )
+
+    # For editable installations
+    def update_editable(self) -> None:
+        if not self.link:
+            logger.debug(
+                "Cannot update repository at %s; repository location is unknown",
+                self.source_dir,
+            )
+            return
+        assert self.editable
+        assert self.source_dir
+        if self.link.scheme == "file":
+            # Static paths don't get updated
+            return
+        vcs_backend = vcs.get_backend_for_scheme(self.link.scheme)
+        # Editable requirements are validated in Requirement constructors.
+        # So here, if it's neither a path nor a valid VCS URL, it's a bug.
+        assert vcs_backend, f"Unsupported VCS URL {self.link.url}"
+        hidden_url = hide_url(self.link.url)
+        vcs_backend.obtain(self.source_dir, url=hidden_url, verbosity=0)
+
+    # Top-level Actions
+    def uninstall(
+        self, auto_confirm: bool = False, verbose: bool = False
+    ) -> Optional[UninstallPathSet]:
+        """
+        Uninstall the distribution currently satisfying this requirement.
+
+        Prompts before removing or modifying files unless
+        ``auto_confirm`` is True.
+
+        Refuses to delete or modify files outside of ``sys.prefix`` -
+        thus uninstallation within a virtual environment can only
+        modify that virtual environment, even if the virtualenv is
+        linked to global site-packages.
+
+        """
+        assert self.req
+        dist = get_default_environment().get_distribution(self.req.name)
+        if not dist:
+            logger.warning("Skipping %s as it is not installed.", self.name)
+            return None
+        logger.info("Found existing installation: %s", dist)
+
+        uninstalled_pathset = UninstallPathSet.from_dist(dist)
+        uninstalled_pathset.remove(auto_confirm, verbose)
+        return uninstalled_pathset
+
+    def _get_archive_name(self, path: str, parentdir: str, rootdir: str) -> str:
+        def _clean_zip_name(name: str, prefix: str) -> str:
+            assert name.startswith(
+                prefix + os.path.sep
+            ), f"name {name!r} doesn't start with prefix {prefix!r}"
+            name = name[len(prefix) + 1 :]
+            name = name.replace(os.path.sep, "/")
+            return name
+
+        path = os.path.join(parentdir, path)
+        name = _clean_zip_name(path, rootdir)
+        return self.name + "/" + name
+
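+    # A rough sketch of the name cleaning above (POSIX paths, names made
+    # up): for a requirement named "demo",
+    #
+    #   >>> self._get_archive_name(
+    #   ...     "mod.py", parentdir="/build/demo/sub", rootdir="/build/demo"
+    #   ... )
+    #   'demo/sub/mod.py'
+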
+    def archive(self, build_dir: Optional[str]) -> None:
+        """Saves archive to provided build_dir.
+
+        Used for saving downloaded VCS requirements as part of `pip download`.
+        """
+        assert self.source_dir
+        if build_dir is None:
+            return
+
+        create_archive = True
+        archive_name = "{}-{}.zip".format(self.name, self.metadata["version"])
+        archive_path = os.path.join(build_dir, archive_name)
+
+        if os.path.exists(archive_path):
+            response = ask_path_exists(
+                "The file {} exists. (i)gnore, (w)ipe, "
+                "(b)ackup, (a)bort ".format(display_path(archive_path)),
+                ("i", "w", "b", "a"),
+            )
+            if response == "i":
+                create_archive = False
+            elif response == "w":
+                logger.warning("Deleting %s", display_path(archive_path))
+                os.remove(archive_path)
+            elif response == "b":
+                dest_file = backup_dir(archive_path)
+                logger.warning(
+                    "Backing up %s to %s",
+                    display_path(archive_path),
+                    display_path(dest_file),
+                )
+                shutil.move(archive_path, dest_file)
+            elif response == "a":
+                sys.exit(-1)
+
+        if not create_archive:
+            return
+
+        zip_output = zipfile.ZipFile(
+            archive_path,
+            "w",
+            zipfile.ZIP_DEFLATED,
+            allowZip64=True,
+        )
+        with zip_output:
+            dir = os.path.normcase(os.path.abspath(self.unpacked_source_directory))
+            for dirpath, dirnames, filenames in os.walk(dir):
+                for dirname in dirnames:
+                    dir_arcname = self._get_archive_name(
+                        dirname,
+                        parentdir=dirpath,
+                        rootdir=dir,
+                    )
+                    zipdir = zipfile.ZipInfo(dir_arcname + "/")
+                    zipdir.external_attr = 0x1ED << 16  # 0o755
+                    zip_output.writestr(zipdir, "")
+                for filename in filenames:
+                    file_arcname = self._get_archive_name(
+                        filename,
+                        parentdir=dirpath,
+                        rootdir=dir,
+                    )
+                    filename = os.path.join(dirpath, filename)
+                    zip_output.write(filename, file_arcname)
+
+        logger.info("Saved %s", display_path(archive_path))
+
+    def install(
+        self,
+        install_options: List[str],
+        global_options: Optional[Sequence[str]] = None,
+        root: Optional[str] = None,
+        home: Optional[str] = None,
+        prefix: Optional[str] = None,
+        warn_script_location: bool = True,
+        use_user_site: bool = False,
+        pycompile: bool = True,
+    ) -> None:
+        scheme = get_scheme(
+            self.name,
+            user=use_user_site,
+            home=home,
+            root=root,
+            isolated=self.isolated,
+            prefix=prefix,
+        )
+
+        global_options = global_options if global_options is not None else []
+        if self.editable and not self.is_wheel:
+            install_editable_legacy(
+                install_options,
+                global_options,
+                prefix=prefix,
+                home=home,
+                use_user_site=use_user_site,
+                name=self.name,
+                setup_py_path=self.setup_py_path,
+                isolated=self.isolated,
+                build_env=self.build_env,
+                unpacked_source_directory=self.unpacked_source_directory,
+            )
+            self.install_succeeded = True
+            return
+
+        if self.is_wheel:
+            assert self.local_file_path
+            direct_url = None
+            # TODO this can be refactored to direct_url = self.download_info
+            if self.editable:
+                direct_url = direct_url_for_editable(self.unpacked_source_directory)
+            elif self.original_link:
+                direct_url = direct_url_from_link(
+                    self.original_link,
+                    self.source_dir,
+                    self.original_link_is_in_wheel_cache,
+                )
+            install_wheel(
+                self.name,
+                self.local_file_path,
+                scheme=scheme,
+                req_description=str(self.req),
+                pycompile=pycompile,
+                warn_script_location=warn_script_location,
+                direct_url=direct_url,
+                requested=self.user_supplied,
+            )
+            self.install_succeeded = True
+            return
+
+        # TODO: Why don't we do this for editable installs?
+
+        # Extend the list of global and install options passed on to
+        # the setup.py call with the ones from the requirements file.
+        # Options specified in requirements file override those
+        # specified on the command line, since the last option given
+        # to setup.py is the one that is used.
+        global_options = list(global_options) + self.global_options
+        install_options = list(install_options) + self.install_options
+
+        try:
+            if (
+                self.legacy_install_reason is not None
+                and self.legacy_install_reason.emit_before_install
+            ):
+                self.legacy_install_reason.emit_deprecation(self.name)
+            success = install_legacy(
+                install_options=install_options,
+                global_options=global_options,
+                root=root,
+                home=home,
+                prefix=prefix,
+                use_user_site=use_user_site,
+                pycompile=pycompile,
+                scheme=scheme,
+                setup_py_path=self.setup_py_path,
+                isolated=self.isolated,
+                req_name=self.name,
+                build_env=self.build_env,
+                unpacked_source_directory=self.unpacked_source_directory,
+                req_description=str(self.req),
+            )
+        except LegacyInstallFailure as exc:
+            self.install_succeeded = False
+            raise exc
+        except Exception:
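+            # Any other exception surfaced after 'setup.py install' ran, so
+            # the install may have (partially) succeeded; record success
+            # before re-raising.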
+            self.install_succeeded = True
+            raise
+
+        self.install_succeeded = success
+
+        if (
+            success
+            and self.legacy_install_reason is not None
+            and self.legacy_install_reason.emit_after_success
+        ):
+            self.legacy_install_reason.emit_deprecation(self.name)
+
+
+def check_invalid_constraint_type(req: InstallRequirement) -> str:
+
+    # Check for unsupported forms
+    problem = ""
+    if not req.name:
+        problem = "Unnamed requirements are not allowed as constraints"
+    elif req.editable:
+        problem = "Editable requirements are not allowed as constraints"
+    elif req.extras:
+        problem = "Constraints cannot have extras"
+
+    if problem:
+        deprecated(
+            reason=(
+                "Constraints are only allowed to take the form of a package "
+                "name and a version specifier. Other forms were originally "
+                "permitted as an accident of the implementation, but were "
+                "undocumented. The new implementation of the resolver no "
+                "longer supports these forms."
+            ),
+            replacement="replacing the constraint with a requirement",
+            # No plan yet for when the new resolver becomes default
+            gone_in=None,
+            issue=8210,
+        )
+
+    return problem
+
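+# A rough sketch of the check above (assuming pip's requirement constructor
+# ``pip._internal.req.constructors.install_req_from_line``; the package
+# name is made up):
+#
+#   >>> from pip._internal.req.constructors import install_req_from_line
+#   >>> req = install_req_from_line("demo[extra]==1.0", constraint=True)
+#   >>> check_invalid_constraint_type(req)
+#   'Constraints cannot have extras'
+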
+
+def _has_option(options: Values, reqs: List[InstallRequirement], option: str) -> bool:
+    if getattr(options, option, None):
+        return True
+    for req in reqs:
+        if getattr(req, option, None):
+            return True
+    return False
+
+
+def _install_option_ignored(
+    install_options: List[str], reqs: List[InstallRequirement]
+) -> bool:
+    for req in reqs:
+        if (install_options or req.install_options) and not req.use_pep517:
+            return False
+    return True
+
+
+class LegacySetupPyOptionsCheckMode(Enum):
+    INSTALL = 1
+    WHEEL = 2
+    DOWNLOAD = 3
+
+
+def check_legacy_setup_py_options(
+    options: Values,
+    reqs: List[InstallRequirement],
+    mode: LegacySetupPyOptionsCheckMode,
+) -> None:
+    has_install_options = _has_option(options, reqs, "install_options")
+    has_build_options = _has_option(options, reqs, "build_options")
+    has_global_options = _has_option(options, reqs, "global_options")
+    legacy_setup_py_options_present = (
+        has_install_options or has_build_options or has_global_options
+    )
+    if not legacy_setup_py_options_present:
+        return
+
+    options.format_control.disallow_binaries()
+    logger.warning(
+        "Implying --no-binary=:all: due to the presence of "
+        "--build-option / --global-option / --install-option. "
+        "Consider using --config-settings for more flexibility.",
+    )
+    if mode == LegacySetupPyOptionsCheckMode.INSTALL and has_install_options:
+        if _install_option_ignored(options.install_options, reqs):
+            logger.warning(
+                "Ignoring --install-option when building using PEP 517",
+            )
+        else:
+            deprecated(
+                reason=(
+                    "--install-option is deprecated because "
+                    "it forces pip to use the 'setup.py install' "
+                    "command which is itself deprecated."
+                ),
+                issue=11358,
+                replacement="to use --config-settings",
+                gone_in="23.1",
+            )
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/req/req_set.py b/venv/lib/python3.9/site-packages/pip/_internal/req/req_set.py
new file mode 100644
index 0000000..ec7a6e0
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/req/req_set.py
@@ -0,0 +1,82 @@
+import logging
+from collections import OrderedDict
+from typing import Dict, List
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.req.req_install import InstallRequirement
+
+logger = logging.getLogger(__name__)
+
+
+class RequirementSet:
+    def __init__(self, check_supported_wheels: bool = True) -> None:
+        """Create a RequirementSet."""
+
+        self.requirements: Dict[str, InstallRequirement] = OrderedDict()
+        self.check_supported_wheels = check_supported_wheels
+
+        self.unnamed_requirements: List[InstallRequirement] = []
+
+    def __str__(self) -> str:
+        requirements = sorted(
+            (req for req in self.requirements.values() if not req.comes_from),
+            key=lambda req: canonicalize_name(req.name or ""),
+        )
+        return " ".join(str(req.req) for req in requirements)
+
+    def __repr__(self) -> str:
+        requirements = sorted(
+            self.requirements.values(),
+            key=lambda req: canonicalize_name(req.name or ""),
+        )
+
+        format_string = "<{classname} object; {count} requirement(s): {reqs}>"
+        return format_string.format(
+            classname=self.__class__.__name__,
+            count=len(requirements),
+            reqs=", ".join(str(req.req) for req in requirements),
+        )
+
+    def add_unnamed_requirement(self, install_req: InstallRequirement) -> None:
+        assert not install_req.name
+        self.unnamed_requirements.append(install_req)
+
+    def add_named_requirement(self, install_req: InstallRequirement) -> None:
+        assert install_req.name
+
+        project_name = canonicalize_name(install_req.name)
+        self.requirements[project_name] = install_req
+
+    def has_requirement(self, name: str) -> bool:
+        project_name = canonicalize_name(name)
+
+        return (
+            project_name in self.requirements
+            and not self.requirements[project_name].constraint
+        )
+
+    def get_requirement(self, name: str) -> InstallRequirement:
+        project_name = canonicalize_name(name)
+
+        if project_name in self.requirements:
+            return self.requirements[project_name]
+
+        raise KeyError(f"No project with the name {name!r}")
+
+    @property
+    def all_requirements(self) -> List[InstallRequirement]:
+        return self.unnamed_requirements + list(self.requirements.values())
+
+    @property
+    def requirements_to_install(self) -> List[InstallRequirement]:
+        """Return the list of requirements that need to be installed.
+
+        TODO remove this property together with the legacy resolver, since the new
+             resolver only returns requirements that need to be installed.
+        """
+        return [
+            install_req
+            for install_req in self.all_requirements
+            if not install_req.constraint and not install_req.satisfied_by
+        ]
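+
+
+# A rough usage sketch (assuming pip's requirement constructor
+# ``pip._internal.req.constructors.install_req_from_line``; the package
+# name is made up):
+#
+#   >>> from pip._internal.req.constructors import install_req_from_line
+#   >>> req_set = RequirementSet()
+#   >>> req_set.add_named_requirement(install_req_from_line("Sample_Pkg==1.0"))
+#   >>> req_set.has_requirement("sample-pkg")  # lookups are canonicalized
+#   True
+#   >>> str(req_set)
+#   'Sample_Pkg==1.0'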
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/req/req_uninstall.py b/venv/lib/python3.9/site-packages/pip/_internal/req/req_uninstall.py
new file mode 100644
index 0000000..15b6738
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/req/req_uninstall.py
@@ -0,0 +1,640 @@
+import functools
+import os
+import sys
+import sysconfig
+from importlib.util import cache_from_source
+from typing import Any, Callable, Dict, Generator, Iterable, List, Optional, Set, Tuple
+
+from pip._internal.exceptions import UninstallationError
+from pip._internal.locations import get_bin_prefix, get_bin_user
+from pip._internal.metadata import BaseDistribution
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.egg_link import egg_link_path_from_location
+from pip._internal.utils.logging import getLogger, indent_log
+from pip._internal.utils.misc import ask, is_local, normalize_path, renames, rmtree
+from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory
+
+logger = getLogger(__name__)
+
+
+def _script_names(
+    bin_dir: str, script_name: str, is_gui: bool
+) -> Generator[str, None, None]:
+    """Create the fully qualified name of the files created by
+    {console,gui}_scripts for the given ``dist``.
+    Returns the list of file names
+    """
+    exe_name = os.path.join(bin_dir, script_name)
+    yield exe_name
+    if not WINDOWS:
+        return
+    yield f"{exe_name}.exe"
+    yield f"{exe_name}.exe.manifest"
+    if is_gui:
+        yield f"{exe_name}-script.pyw"
+    else:
+        yield f"{exe_name}-script.py"
+
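+# A rough sketch of the yielded names (paths made up): on Windows,
+# ``_script_names("/venv/bin", "demo", is_gui=False)`` yields
+# "/venv/bin/demo", "/venv/bin/demo.exe", "/venv/bin/demo.exe.manifest"
+# and "/venv/bin/demo-script.py"; on other platforms only "/venv/bin/demo".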
+
+def _unique(
+    fn: Callable[..., Generator[Any, None, None]]
+) -> Callable[..., Generator[Any, None, None]]:
+    @functools.wraps(fn)
+    def unique(*args: Any, **kw: Any) -> Generator[Any, None, None]:
+        seen: Set[Any] = set()
+        for item in fn(*args, **kw):
+            if item not in seen:
+                seen.add(item)
+                yield item
+
+    return unique
+
+
+@_unique
+def uninstallation_paths(dist: BaseDistribution) -> Generator[str, None, None]:
+    """
+    Yield all the uninstallation paths for dist based on RECORD-without-.py[co]
+
+    Yield paths to all the files in RECORD. For each .py file in RECORD, add
+    the .pyc and .pyo in the same directory.
+
+    UninstallPathSet.add() takes care of the __pycache__ .py[co].
+
+    If RECORD is not found, raises UninstallationError,
+    with possible information from the INSTALLER file.
+
+    https://packaging.python.org/specifications/recording-installed-packages/
+    """
+    location = dist.location
+    assert location is not None, "not installed"
+
+    entries = dist.iter_declared_entries()
+    if entries is None:
+        msg = "Cannot uninstall {dist}, RECORD file not found.".format(dist=dist)
+        installer = dist.installer
+        if not installer or installer == "pip":
+            dep = "{}=={}".format(dist.raw_name, dist.version)
+            msg += (
+                " You might be able to recover from this via: "
+                "'pip install --force-reinstall --no-deps {}'.".format(dep)
+            )
+        else:
+            msg += " Hint: The package was installed by {}.".format(installer)
+        raise UninstallationError(msg)
+
+    for entry in entries:
+        path = os.path.join(location, entry)
+        yield path
+        if path.endswith(".py"):
+            dn, fn = os.path.split(path)
+            base = fn[:-3]
+            path = os.path.join(dn, base + ".pyc")
+            yield path
+            path = os.path.join(dn, base + ".pyo")
+            yield path
+
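+# A rough sketch (paths made up): for a dist at "/sp" whose RECORD lists
+# "pkg/mod.py" and "pkg-1.0.dist-info/RECORD", this yields
+# "/sp/pkg/mod.py", "/sp/pkg/mod.pyc", "/sp/pkg/mod.pyo" and
+# "/sp/pkg-1.0.dist-info/RECORD" (with duplicates removed by @_unique).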
+
+def compact(paths: Iterable[str]) -> Set[str]:
+    """Compact a path set to contain the minimal number of paths
+    necessary to contain all paths in the set. If /a/path/ and
+    /a/path/to/a/file.txt are both in the set, leave only the
+    shorter path."""
+
+    sep = os.path.sep
+    short_paths: Set[str] = set()
+    for path in sorted(paths, key=len):
+        should_skip = any(
+            path.startswith(shortpath.rstrip("*"))
+            and path[len(shortpath.rstrip("*").rstrip(sep))] == sep
+            for shortpath in short_paths
+        )
+        if not should_skip:
+            short_paths.add(path)
+    return short_paths
+
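+# A rough sketch (paths made up):
+#
+#   >>> sorted(compact({"/a/path/", "/a/path/to/a/file.txt", "/b/file.txt"}))
+#   ['/a/path/', '/b/file.txt']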
+
+def compress_for_rename(paths: Iterable[str]) -> Set[str]:
+    """Returns a set containing the paths that need to be renamed.
+
+    This set may include directories when the original sequence of paths
+    included every file on disk.
+    """
+    case_map = {os.path.normcase(p): p for p in paths}
+    remaining = set(case_map)
+    unchecked = sorted({os.path.split(p)[0] for p in case_map.values()}, key=len)
+    wildcards: Set[str] = set()
+
+    def norm_join(*a: str) -> str:
+        return os.path.normcase(os.path.join(*a))
+
+    for root in unchecked:
+        if any(os.path.normcase(root).startswith(w) for w in wildcards):
+            # This directory has already been handled.
+            continue
+
+        all_files: Set[str] = set()
+        all_subdirs: Set[str] = set()
+        for dirname, subdirs, files in os.walk(root):
+            all_subdirs.update(norm_join(root, dirname, d) for d in subdirs)
+            all_files.update(norm_join(root, dirname, f) for f in files)
+        # If all the files we found are in our remaining set of files to
+        # remove, then remove them from the latter set and add a wildcard
+        # for the directory.
+        if not (all_files - remaining):
+            remaining.difference_update(all_files)
+            wildcards.add(root + os.sep)
+
+    return set(map(case_map.__getitem__, remaining)) | wildcards
+
+
+def compress_for_output_listing(paths: Iterable[str]) -> Tuple[Set[str], Set[str]]:
+    """Returns a tuple of 2 sets of which paths to display to user
+
+    The first set contains paths that would be deleted. Files of a package
+    are not added and the top-level directory of the package has a '*' added
+    at the end - to signify that all it's contents are removed.
+
+    The second set contains files that would have been skipped in the above
+    folders.
+    """
+
+    will_remove = set(paths)
+    will_skip = set()
+
+    # Determine folders and files
+    folders = set()
+    files = set()
+    for path in will_remove:
+        if path.endswith(".pyc"):
+            continue
+        if path.endswith("__init__.py") or ".dist-info" in path:
+            folders.add(os.path.dirname(path))
+        files.add(path)
+
+    # probably this one https://github.com/python/mypy/issues/390
+    _normcased_files = set(map(os.path.normcase, files))  # type: ignore
+
+    folders = compact(folders)
+
+    # This walks the tree using os.walk to not miss extra folders
+    # that might get added.
+    for folder in folders:
+        for dirpath, _, dirfiles in os.walk(folder):
+            for fname in dirfiles:
+                if fname.endswith(".pyc"):
+                    continue
+
+                file_ = os.path.join(dirpath, fname)
+                if (
+                    os.path.isfile(file_)
+                    and os.path.normcase(file_) not in _normcased_files
+                ):
+                    # We are skipping this file. Add it to the set.
+                    will_skip.add(file_)
+
+    will_remove = files | {os.path.join(folder, "*") for folder in folders}
+
+    return will_remove, will_skip
+
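+# A rough sketch (paths made up): given RECORD paths
+# {"/sp/pkg/__init__.py", "/sp/pkg/mod.py"}, the first set comes back as
+# {"/sp/pkg/__init__.py", "/sp/pkg/mod.py", "/sp/pkg/*"} (compact() later
+# collapses it to "/sp/pkg/*" for display), while any file found on disk
+# under /sp/pkg but not in RECORD, say "/sp/pkg/local.cfg", lands in the
+# second, "would skip" set.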
+
+class StashedUninstallPathSet:
+    """A set of file rename operations to stash files while
+    tentatively uninstalling them."""
+
+    def __init__(self) -> None:
+        # Mapping from source file root to [Adjacent]TempDirectory
+        # for files under that directory.
+        self._save_dirs: Dict[str, TempDirectory] = {}
+        # (old path, new path) tuples for each move that may need
+        # to be undone.
+        self._moves: List[Tuple[str, str]] = []
+
+    def _get_directory_stash(self, path: str) -> str:
+        """Stashes a directory.
+
+        Directories are stashed adjacent to their original location if
+        possible, or else moved/copied into the user's temp dir."""
+
+        try:
+            save_dir: TempDirectory = AdjacentTempDirectory(path)
+        except OSError:
+            save_dir = TempDirectory(kind="uninstall")
+        self._save_dirs[os.path.normcase(path)] = save_dir
+
+        return save_dir.path
+
+    def _get_file_stash(self, path: str) -> str:
+        """Stashes a file.
+
+        If no root has been provided, one will be created for the directory
+        in the user's temp directory."""
+        path = os.path.normcase(path)
+        head, old_head = os.path.dirname(path), None
+        save_dir = None
+
+        while head != old_head:
+            try:
+                save_dir = self._save_dirs[head]
+                break
+            except KeyError:
+                pass
+            head, old_head = os.path.dirname(head), head
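+        # (while-else: this branch runs only when the loop exhausts all
+        # parent directories without hitting ``break``.)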
+        else:
+            # Did not find any suitable root
+            head = os.path.dirname(path)
+            save_dir = TempDirectory(kind="uninstall")
+            self._save_dirs[head] = save_dir
+
+        relpath = os.path.relpath(path, head)
+        if relpath and relpath != os.path.curdir:
+            return os.path.join(save_dir.path, relpath)
+        return save_dir.path
+
+    def stash(self, path: str) -> str:
+        """Stashes the directory or file and returns its new location.
+        Handle symlinks as files to avoid modifying the symlink targets.
+        """
+        path_is_dir = os.path.isdir(path) and not os.path.islink(path)
+        if path_is_dir:
+            new_path = self._get_directory_stash(path)
+        else:
+            new_path = self._get_file_stash(path)
+
+        self._moves.append((path, new_path))
+        if path_is_dir and os.path.isdir(new_path):
+            # If we're moving a directory, we need to
+            # remove the destination first or else it will be
+            # moved to inside the existing directory.
+            # We just created new_path ourselves, so it will
+            # be removable.
+            os.rmdir(new_path)
+        renames(path, new_path)
+        return new_path
+
+    def commit(self) -> None:
+        """Commits the uninstall by removing stashed files."""
+        for _, save_dir in self._save_dirs.items():
+            save_dir.cleanup()
+        self._moves = []
+        self._save_dirs = {}
+
+    def rollback(self) -> None:
+        """Undoes the uninstall by moving stashed files back."""
+        for p in self._moves:
+            logger.info("Moving to %s\n from %s", *p)
+
+        for new_path, path in self._moves:
+            try:
+                logger.debug("Replacing %s from %s", new_path, path)
+                if os.path.isfile(new_path) or os.path.islink(new_path):
+                    os.unlink(new_path)
+                elif os.path.isdir(new_path):
+                    rmtree(new_path)
+                renames(path, new_path)
+            except OSError as ex:
+                logger.error("Failed to restore %s", new_path)
+                logger.debug("Exception: %s", ex)
+
+        self.commit()
+
+    @property
+    def can_rollback(self) -> bool:
+        return bool(self._moves)
+
+
+class UninstallPathSet:
+    """A set of file paths to be removed in the uninstallation of a
+    requirement."""
+
+    def __init__(self, dist: BaseDistribution) -> None:
+        self._paths: Set[str] = set()
+        self._refuse: Set[str] = set()
+        self._pth: Dict[str, UninstallPthEntries] = {}
+        self._dist = dist
+        self._moved_paths = StashedUninstallPathSet()
+
+    def _permitted(self, path: str) -> bool:
+        """
+        Return True if the given path is one we are permitted to
+        remove/modify, False otherwise.
+
+        """
+        return is_local(path)
+
+    def add(self, path: str) -> None:
+        head, tail = os.path.split(path)
+
+        # we normalize the head to resolve parent directory symlinks, but not
+        # the tail, since we only want to uninstall symlinks, not their targets
+        path = os.path.join(normalize_path(head), os.path.normcase(tail))
+
+        if not os.path.exists(path):
+            return
+        if self._permitted(path):
+            self._paths.add(path)
+        else:
+            self._refuse.add(path)
+
+        # __pycache__ files can show up after 'installed-files.txt' is created,
+        # due to imports
+        if os.path.splitext(path)[1] == ".py":
+            self.add(cache_from_source(path))
+
+    def add_pth(self, pth_file: str, entry: str) -> None:
+        pth_file = normalize_path(pth_file)
+        if self._permitted(pth_file):
+            if pth_file not in self._pth:
+                self._pth[pth_file] = UninstallPthEntries(pth_file)
+            self._pth[pth_file].add(entry)
+        else:
+            self._refuse.add(pth_file)
+
+    def remove(self, auto_confirm: bool = False, verbose: bool = False) -> None:
+        """Remove paths in ``self._paths`` with confirmation (unless
+        ``auto_confirm`` is True)."""
+
+        if not self._paths:
+            logger.info(
+                "Can't uninstall '%s'. No files were found to uninstall.",
+                self._dist.raw_name,
+            )
+            return
+
+        dist_name_version = f"{self._dist.raw_name}-{self._dist.version}"
+        logger.info("Uninstalling %s:", dist_name_version)
+
+        with indent_log():
+            if auto_confirm or self._allowed_to_proceed(verbose):
+                moved = self._moved_paths
+
+                for_rename = compress_for_rename(self._paths)
+
+                for path in sorted(compact(for_rename)):
+                    moved.stash(path)
+                    logger.verbose("Removing file or directory %s", path)
+
+                for pth in self._pth.values():
+                    pth.remove()
+
+                logger.info("Successfully uninstalled %s", dist_name_version)
+
+    def _allowed_to_proceed(self, verbose: bool) -> bool:
+        """Display which files would be deleted and prompt for confirmation"""
+
+        def _display(msg: str, paths: Iterable[str]) -> None:
+            if not paths:
+                return
+
+            logger.info(msg)
+            with indent_log():
+                for path in sorted(compact(paths)):
+                    logger.info(path)
+
+        if not verbose:
+            will_remove, will_skip = compress_for_output_listing(self._paths)
+        else:
+            # In verbose mode, display all the files that are going to be
+            # deleted.
+            will_remove = set(self._paths)
+            will_skip = set()
+
+        _display("Would remove:", will_remove)
+        _display("Would not remove (might be manually added):", will_skip)
+        _display("Would not remove (outside of prefix):", self._refuse)
+        if verbose:
+            _display("Will actually move:", compress_for_rename(self._paths))
+
+        return ask("Proceed (Y/n)? ", ("y", "n", "")) != "n"
+
+    def rollback(self) -> None:
+        """Rollback the changes previously made by remove()."""
+        if not self._moved_paths.can_rollback:
+            logger.error(
+                "Can't roll back %s; was not uninstalled",
+                self._dist.raw_name,
+            )
+            return
+        logger.info("Rolling back uninstall of %s", self._dist.raw_name)
+        self._moved_paths.rollback()
+        for pth in self._pth.values():
+            pth.rollback()
+
+    def commit(self) -> None:
+        """Remove temporary save dir: rollback will no longer be possible."""
+        self._moved_paths.commit()
+
+    @classmethod
+    def from_dist(cls, dist: BaseDistribution) -> "UninstallPathSet":
+        dist_location = dist.location
+        info_location = dist.info_location
+        if dist_location is None:
+            logger.info(
+                "Not uninstalling %s since it is not installed",
+                dist.canonical_name,
+            )
+            return cls(dist)
+
+        normalized_dist_location = normalize_path(dist_location)
+        if not dist.local:
+            logger.info(
+                "Not uninstalling %s at %s, outside environment %s",
+                dist.canonical_name,
+                normalized_dist_location,
+                sys.prefix,
+            )
+            return cls(dist)
+
+        if normalized_dist_location in {
+            p
+            for p in {sysconfig.get_path("stdlib"), sysconfig.get_path("platstdlib")}
+            if p
+        }:
+            logger.info(
+                "Not uninstalling %s at %s, as it is in the standard library.",
+                dist.canonical_name,
+                normalized_dist_location,
+            )
+            return cls(dist)
+
+        paths_to_remove = cls(dist)
+        develop_egg_link = egg_link_path_from_location(dist.raw_name)
+
+        # Distribution is installed with metadata in a "flat" .egg-info
+        # directory. This means it is not a modern .dist-info installation, an
+        # egg, or legacy editable.
+        setuptools_flat_installation = (
+            dist.installed_with_setuptools_egg_info
+            and info_location is not None
+            and os.path.exists(info_location)
+            # If dist is editable and the location points to a ``.egg-info``,
+            # we are in fact in the legacy editable case.
+            and not info_location.endswith(f"{dist.setuptools_filename}.egg-info")
+        )
+
+        # The order of the uninstall cases matters: with two installs of the
+        # same package, pip needs to uninstall the currently detected version.
+        if setuptools_flat_installation:
+            if info_location is not None:
+                paths_to_remove.add(info_location)
+            installed_files = dist.iter_declared_entries()
+            if installed_files is not None:
+                for installed_file in installed_files:
+                    paths_to_remove.add(os.path.join(dist_location, installed_file))
+            # FIXME: need a test for this elif block
+            # occurs with --single-version-externally-managed/--record outside
+            # of pip
+            elif dist.is_file("top_level.txt"):
+                try:
+                    namespace_packages = dist.read_text("namespace_packages.txt")
+                except FileNotFoundError:
+                    namespaces = []
+                else:
+                    namespaces = namespace_packages.splitlines(keepends=False)
+                for top_level_pkg in [
+                    p
+                    for p in dist.read_text("top_level.txt").splitlines()
+                    if p and p not in namespaces
+                ]:
+                    path = os.path.join(dist_location, top_level_pkg)
+                    paths_to_remove.add(path)
+                    paths_to_remove.add(f"{path}.py")
+                    paths_to_remove.add(f"{path}.pyc")
+                    paths_to_remove.add(f"{path}.pyo")
+
+        elif dist.installed_by_distutils:
+            raise UninstallationError(
+                "Cannot uninstall {!r}. It is a distutils installed project "
+                "and thus we cannot accurately determine which files belong "
+                "to it which would lead to only a partial uninstall.".format(
+                    dist.raw_name,
+                )
+            )
+
+        elif dist.installed_as_egg:
+            # package installed by easy_install
+            # We cannot match on dist.egg_name because it can slightly vary
+            # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
+            paths_to_remove.add(dist_location)
+            easy_install_egg = os.path.split(dist_location)[1]
+            easy_install_pth = os.path.join(
+                os.path.dirname(dist_location),
+                "easy-install.pth",
+            )
+            paths_to_remove.add_pth(easy_install_pth, "./" + easy_install_egg)
+
+        elif dist.installed_with_dist_info:
+            for path in uninstallation_paths(dist):
+                paths_to_remove.add(path)
+
+        elif develop_egg_link:
+            # PEP 660 modern editable is handled in the ``.dist-info`` case
+            # above, so this only covers the setuptools-style editable.
+            with open(develop_egg_link) as fh:
+                link_pointer = os.path.normcase(fh.readline().strip())
+                normalized_link_pointer = normalize_path(link_pointer)
+            assert os.path.samefile(
+                normalized_link_pointer, normalized_dist_location
+            ), (
+                f"Egg-link {link_pointer} does not match installed location of "
+                f"{dist.raw_name} (at {dist_location})"
+            )
+            paths_to_remove.add(develop_egg_link)
+            easy_install_pth = os.path.join(
+                os.path.dirname(develop_egg_link), "easy-install.pth"
+            )
+            paths_to_remove.add_pth(easy_install_pth, dist_location)
+
+        else:
+            logger.debug(
+                "Not sure how to uninstall: %s - Check: %s",
+                dist,
+                dist_location,
+            )
+
+        if dist.in_usersite:
+            bin_dir = get_bin_user()
+        else:
+            bin_dir = get_bin_prefix()
+
+        # find distutils scripts= scripts
+        try:
+            for script in dist.iter_distutils_script_names():
+                paths_to_remove.add(os.path.join(bin_dir, script))
+                if WINDOWS:
+                    paths_to_remove.add(os.path.join(bin_dir, f"{script}.bat"))
+        except (FileNotFoundError, NotADirectoryError):
+            pass
+
+        # find console_scripts and gui_scripts
+        def iter_scripts_to_remove(
+            dist: BaseDistribution,
+            bin_dir: str,
+        ) -> Generator[str, None, None]:
+            for entry_point in dist.iter_entry_points():
+                if entry_point.group == "console_scripts":
+                    yield from _script_names(bin_dir, entry_point.name, False)
+                elif entry_point.group == "gui_scripts":
+                    yield from _script_names(bin_dir, entry_point.name, True)
+
+        for s in iter_scripts_to_remove(dist, bin_dir):
+            paths_to_remove.add(s)
+
+        return paths_to_remove
+
+
+class UninstallPthEntries:
+    def __init__(self, pth_file: str) -> None:
+        self.file = pth_file
+        self.entries: Set[str] = set()
+        self._saved_lines: Optional[List[bytes]] = None
+
+    def add(self, entry: str) -> None:
+        entry = os.path.normcase(entry)
+        # On Windows, os.path.normcase converts the entry to use
+        # backslashes.  This is correct for entries that describe absolute
+        # paths outside of site-packages, but all the others use forward
+        # slashes.
+        # os.path.splitdrive is used instead of os.path.isabs because isabs
+        # treats non-absolute paths with drive letter markings like c:foo\bar
+        # as absolute paths. It also does not recognize UNC paths if they don't
+        # have more than "\\server\share". Valid examples: "\\server\share\" or
+        # "\\server\share\folder".
+        if WINDOWS and not os.path.splitdrive(entry)[0]:
+            entry = entry.replace("\\", "/")
+        self.entries.add(entry)
+
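+    # A rough sketch of the Windows test above (paths made up; on Windows):
+    #
+    #   >>> os.path.splitdrive("C:\\Tools\\pkg")[0]  # absolute: kept as-is
+    #   'C:'
+    #   >>> os.path.splitdrive(".\\pkg")[0]          # relative: slashes flipped
+    #   ''
+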
+    def remove(self) -> None:
+        logger.verbose("Removing pth entries from %s:", self.file)
+
+        # If the file doesn't exist, log a warning and return
+        if not os.path.isfile(self.file):
+            logger.warning("Cannot remove entries from nonexistent file %s", self.file)
+            return
+        with open(self.file, "rb") as fh:
+            # Read raw bytes so the original line endings can be detected below.
+            lines = fh.readlines()
+            self._saved_lines = lines
+        if any(b"\r\n" in line for line in lines):
+            endline = "\r\n"
+        else:
+            endline = "\n"
+        # handle missing trailing newline
+        if lines and not lines[-1].endswith(endline.encode("utf-8")):
+            lines[-1] = lines[-1] + endline.encode("utf-8")
+        for entry in self.entries:
+            try:
+                logger.verbose("Removing entry: %s", entry)
+                lines.remove((entry + endline).encode("utf-8"))
+            except ValueError:
+                pass
+        with open(self.file, "wb") as fh:
+            fh.writelines(lines)
+
+    def rollback(self) -> bool:
+        if self._saved_lines is None:
+            logger.error("Cannot roll back changes to %s, none were made", self.file)
+            return False
+        logger.debug("Rolling %s back to previous state", self.file)
+        with open(self.file, "wb") as fh:
+            fh.writelines(self._saved_lines)
+        return True
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/resolution/__init__.py b/venv/lib/python3.9/site-packages/pip/_internal/resolution/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/resolution/__init__.py
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..836e2a2
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/resolution/__pycache__/base.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/resolution/__pycache__/base.cpython-39.pyc
new file mode 100644
index 0000000..072bc26
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/resolution/__pycache__/base.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/resolution/base.py b/venv/lib/python3.9/site-packages/pip/_internal/resolution/base.py
new file mode 100644
index 0000000..42dade1
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/resolution/base.py
@@ -0,0 +1,20 @@
+from typing import Callable, List, Optional
+
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.req.req_set import RequirementSet
+
+InstallRequirementProvider = Callable[
+    [str, Optional[InstallRequirement]], InstallRequirement
+]
+
+
+class BaseResolver:
+    def resolve(
+        self, root_reqs: List[InstallRequirement], check_supported_wheels: bool
+    ) -> RequirementSet:
+        raise NotImplementedError()
+
+    def get_installation_order(
+        self, req_set: RequirementSet
+    ) -> List[InstallRequirement]:
+        raise NotImplementedError()
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/resolution/legacy/__init__.py b/venv/lib/python3.9/site-packages/pip/_internal/resolution/legacy/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/resolution/legacy/__init__.py
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..f7dfa5b
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-39.pyc
new file mode 100644
index 0000000..4bf56ff
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/resolution/legacy/resolver.py b/venv/lib/python3.9/site-packages/pip/_internal/resolution/legacy/resolver.py
new file mode 100644
index 0000000..fb49d41
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/resolution/legacy/resolver.py
@@ -0,0 +1,600 @@
+"""Dependency Resolution
+
+The dependency resolution in pip is performed as follows:
+
+for top-level requirements:
+    a. only one spec is allowed per project, whether or not it conflicts
+       with others; otherwise a "double requirement" exception is raised
+    b. they override sub-dependency requirements.
+for sub-dependencies:
+    a. "first found, wins" (where the order is breadth first)
+"""
+
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+import logging
+import sys
+from collections import defaultdict
+from itertools import chain
+from typing import DefaultDict, Iterable, List, Optional, Set, Tuple
+
+from pip._vendor.packaging import specifiers
+from pip._vendor.packaging.requirements import Requirement
+
+from pip._internal.cache import WheelCache
+from pip._internal.exceptions import (
+    BestVersionAlreadyInstalled,
+    DistributionNotFound,
+    HashError,
+    HashErrors,
+    InstallationError,
+    NoneMetadataError,
+    UnsupportedPythonVersion,
+)
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import BaseDistribution
+from pip._internal.models.link import Link
+from pip._internal.models.wheel import Wheel
+from pip._internal.operations.prepare import RequirementPreparer
+from pip._internal.req.req_install import (
+    InstallRequirement,
+    check_invalid_constraint_type,
+)
+from pip._internal.req.req_set import RequirementSet
+from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider
+from pip._internal.utils import compatibility_tags
+from pip._internal.utils.compatibility_tags import get_supported
+from pip._internal.utils.direct_url_helpers import direct_url_from_link
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import normalize_version_info
+from pip._internal.utils.packaging import check_requires_python
+
+logger = logging.getLogger(__name__)
+
+DiscoveredDependencies = DefaultDict[str, List[InstallRequirement]]
+
+
+def _check_dist_requires_python(
+    dist: BaseDistribution,
+    version_info: Tuple[int, int, int],
+    ignore_requires_python: bool = False,
+) -> None:
+    """
+    Check whether the given Python version is compatible with a distribution's
+    "Requires-Python" value.
+
+    :param version_info: A 3-tuple of ints representing the Python
+        major-minor-micro version to check.
+    :param ignore_requires_python: Whether to ignore the "Requires-Python"
+        value if the given Python version isn't compatible.
+
+    :raises UnsupportedPythonVersion: When the given Python version isn't
+        compatible.
+    """
+    # This idiosyncratically converts the SpecifierSet to str and lets
+    # check_requires_python then parse it again into a SpecifierSet. But this
+    # is the legacy resolver so I'm just not going to bother refactoring.
+    try:
+        requires_python = str(dist.requires_python)
+    except FileNotFoundError as e:
+        raise NoneMetadataError(dist, str(e))
+    try:
+        is_compatible = check_requires_python(
+            requires_python,
+            version_info=version_info,
+        )
+    except specifiers.InvalidSpecifier as exc:
+        logger.warning(
+            "Package %r has an invalid Requires-Python: %s", dist.raw_name, exc
+        )
+        return
+
+    if is_compatible:
+        return
+
+    version = ".".join(map(str, version_info))
+    if ignore_requires_python:
+        logger.debug(
+            "Ignoring failed Requires-Python check for package %r: %s not in %r",
+            dist.raw_name,
+            version,
+            requires_python,
+        )
+        return
+
+    raise UnsupportedPythonVersion(
+        "Package {!r} requires a different Python: {} not in {!r}".format(
+            dist.raw_name, version, requires_python
+        )
+    )
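+
+
+# For example (hypothetical metadata): a distribution declaring
+# Requires-Python ">=3.10" checked with version_info=(3, 9, 0) raises
+# UnsupportedPythonVersion, unless ignore_requires_python is set, in
+# which case only a debug message is logged.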
+
+
+class Resolver(BaseResolver):
+    """Resolves which packages need to be installed/uninstalled to perform \
+    the requested operation without breaking the requirements of any package.
+    """
+
+    _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"}
+
+    def __init__(
+        self,
+        preparer: RequirementPreparer,
+        finder: PackageFinder,
+        wheel_cache: Optional[WheelCache],
+        make_install_req: InstallRequirementProvider,
+        use_user_site: bool,
+        ignore_dependencies: bool,
+        ignore_installed: bool,
+        ignore_requires_python: bool,
+        force_reinstall: bool,
+        upgrade_strategy: str,
+        py_version_info: Optional[Tuple[int, ...]] = None,
+    ) -> None:
+        super().__init__()
+        assert upgrade_strategy in self._allowed_strategies
+
+        if py_version_info is None:
+            py_version_info = sys.version_info[:3]
+        else:
+            py_version_info = normalize_version_info(py_version_info)
+
+        self._py_version_info = py_version_info
+
+        self.preparer = preparer
+        self.finder = finder
+        self.wheel_cache = wheel_cache
+
+        self.upgrade_strategy = upgrade_strategy
+        self.force_reinstall = force_reinstall
+        self.ignore_dependencies = ignore_dependencies
+        self.ignore_installed = ignore_installed
+        self.ignore_requires_python = ignore_requires_python
+        self.use_user_site = use_user_site
+        self._make_install_req = make_install_req
+
+        self._discovered_dependencies: DiscoveredDependencies = defaultdict(list)
+
+    def resolve(
+        self, root_reqs: List[InstallRequirement], check_supported_wheels: bool
+    ) -> RequirementSet:
+        """Resolve what operations need to be done
+
+        As a side-effect of this method, the packages (and their dependencies)
+        are downloaded, unpacked and prepared for installation. This
+        preparation is done by ``pip.operations.prepare``.
+
+        Once PyPI has static dependency metadata available, it would be
+        possible to move the preparation to become a step separated from
+        dependency resolution.
+        """
+        requirement_set = RequirementSet(check_supported_wheels=check_supported_wheels)
+        for req in root_reqs:
+            if req.constraint:
+                check_invalid_constraint_type(req)
+            self._add_requirement_to_set(requirement_set, req)
+
+        # Actually prepare the files, and collect any exceptions. Most hash
+        # exceptions cannot be checked ahead of time, because
+        # _populate_link() needs to be called before we can make decisions
+        # based on link type.
+        discovered_reqs: List[InstallRequirement] = []
+        hash_errors = HashErrors()
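+        # Note: chain() lazily re-reads discovered_reqs as the loop below
+        # extends it, so dependencies discovered mid-iteration are resolved
+        # on later passes (a breadth-first walk over the dependency tree).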
+        for req in chain(requirement_set.all_requirements, discovered_reqs):
+            try:
+                discovered_reqs.extend(self._resolve_one(requirement_set, req))
+            except HashError as exc:
+                exc.req = req
+                hash_errors.append(exc)
+
+        if hash_errors:
+            raise hash_errors
+
+        return requirement_set
+
+    def _add_requirement_to_set(
+        self,
+        requirement_set: RequirementSet,
+        install_req: InstallRequirement,
+        parent_req_name: Optional[str] = None,
+        extras_requested: Optional[Iterable[str]] = None,
+    ) -> Tuple[List[InstallRequirement], Optional[InstallRequirement]]:
+        """Add install_req as a requirement to install.
+
+        :param parent_req_name: The name of the requirement that needed this
+            added. The name is used because when multiple unnamed requirements
+            resolve to the same name, we could otherwise end up with dependency
+            links that point outside the Requirements set. parent_req must
+            already be added. Note that None implies that this is a user
+            supplied requirement, vs an inferred one.
+        :param extras_requested: an iterable of extras used to evaluate the
+            environment markers.
+        :return: Additional requirements to scan. That is either [] if
+            the requirement is not applicable, or [install_req] if the
+            requirement is applicable and has just been added.
+        """
+        # If the markers do not match, ignore this requirement.
+        if not install_req.match_markers(extras_requested):
+            logger.info(
+                "Ignoring %s: markers '%s' don't match your environment",
+                install_req.name,
+                install_req.markers,
+            )
+            return [], None
+
+        # If the wheel is not supported, raise an error.
+        # Should check this after filtering out based on environment markers to
+        # allow specifying different wheels based on the environment/OS, in a
+        # single requirements file.
+        if install_req.link and install_req.link.is_wheel:
+            wheel = Wheel(install_req.link.filename)
+            tags = compatibility_tags.get_supported()
+            if requirement_set.check_supported_wheels and not wheel.supported(tags):
+                raise InstallationError(
+                    "{} is not a supported wheel on this platform.".format(
+                        wheel.filename
+                    )
+                )
+
+        # This next bit is really a sanity check.
+        assert (
+            not install_req.user_supplied or parent_req_name is None
+        ), "a user supplied req shouldn't have a parent"
+
+        # Unnamed requirements are scanned again and the requirement won't be
+        # added as a dependency until after scanning.
+        if not install_req.name:
+            requirement_set.add_unnamed_requirement(install_req)
+            return [install_req], None
+
+        try:
+            existing_req: Optional[
+                InstallRequirement
+            ] = requirement_set.get_requirement(install_req.name)
+        except KeyError:
+            existing_req = None
+
+        has_conflicting_requirement = (
+            parent_req_name is None
+            and existing_req
+            and not existing_req.constraint
+            and existing_req.extras == install_req.extras
+            and existing_req.req
+            and install_req.req
+            and existing_req.req.specifier != install_req.req.specifier
+        )
+        if has_conflicting_requirement:
+            raise InstallationError(
+                "Double requirement given: {} (already in {}, name={!r})".format(
+                    install_req, existing_req, install_req.name
+                )
+            )
+
+        # When no existing requirement exists, add the requirement as a
+        # dependency and it will be scanned again after.
+        if not existing_req:
+            requirement_set.add_named_requirement(install_req)
+            # We'd want to rescan this requirement later
+            return [install_req], install_req
+
+        # Otherwise assume there's no need to scan: we've already
+        # encountered this requirement and queued it for scanning.
+        if install_req.constraint or not existing_req.constraint:
+            return [], existing_req
+
+        does_not_satisfy_constraint = install_req.link and not (
+            existing_req.link and install_req.link.path == existing_req.link.path
+        )
+        if does_not_satisfy_constraint:
+            raise InstallationError(
+                "Could not satisfy constraints for '{}': "
+                "installation from path or url cannot be "
+                "constrained to a version".format(install_req.name)
+            )
+        # If we're now installing a constraint, mark the existing
+        # object for real installation.
+        existing_req.constraint = False
+        # If we're now installing a user supplied requirement,
+        # mark the existing object as such.
+        if install_req.user_supplied:
+            existing_req.user_supplied = True
+        existing_req.extras = tuple(
+            sorted(set(existing_req.extras) | set(install_req.extras))
+        )
+        logger.debug(
+            "Setting %s extras to: %s",
+            existing_req,
+            existing_req.extras,
+        )
+        # Return the existing requirement for addition to the parent and
+        # scanning again.
+        return [existing_req], existing_req
+
+    def _is_upgrade_allowed(self, req: InstallRequirement) -> bool:
+        if self.upgrade_strategy == "to-satisfy-only":
+            return False
+        elif self.upgrade_strategy == "eager":
+            return True
+        else:
+            assert self.upgrade_strategy == "only-if-needed"
+            return req.user_supplied or req.constraint
+
+    def _set_req_to_reinstall(self, req: InstallRequirement) -> None:
+        """
+        Set a requirement to be installed.
+        """
+        # Don't uninstall the conflict if doing a user install and the
+        # conflict is not a user install.
+        if not self.use_user_site or req.satisfied_by.in_usersite:
+            req.should_reinstall = True
+        req.satisfied_by = None
+
+    def _check_skip_installed(
+        self, req_to_install: InstallRequirement
+    ) -> Optional[str]:
+        """Check if req_to_install should be skipped.
+
+        This will check if the req is installed, and whether we should upgrade
+        or reinstall it, taking into account all the relevant user options.
+
+        After calling this, req_to_install will have satisfied_by set to
+        None only if it is to be upgraded or reinstalled. Any other value
+        is a dist recording the currently installed distribution that
+        satisfies the requirement.
+
+        Note that for VCS URLs and the like we can't assess skipping in
+        this routine - we simply note that the source must be pulled down;
+        it is downloaded and introspected later to decide on upgrades and
+        reinstalls.
+
+        :return: A text reason for why it was skipped, or None.
+        """
+        if self.ignore_installed:
+            return None
+
+        req_to_install.check_if_exists(self.use_user_site)
+        if not req_to_install.satisfied_by:
+            return None
+
+        if self.force_reinstall:
+            self._set_req_to_reinstall(req_to_install)
+            return None
+
+        if not self._is_upgrade_allowed(req_to_install):
+            if self.upgrade_strategy == "only-if-needed":
+                return "already satisfied, skipping upgrade"
+            return "already satisfied"
+
+        # Check for the possibility of an upgrade.  For link-based
+        # requirements we have to pull the source tree down and inspect it
+        # to determine the version number, so that case is handled later.
+        if not req_to_install.link:
+            try:
+                self.finder.find_requirement(req_to_install, upgrade=True)
+            except BestVersionAlreadyInstalled:
+                # Then the best version is installed.
+                return "already up-to-date"
+            except DistributionNotFound:
+                # No distribution was found, so we squash the error. It will
+                # be raised again later, when we retry the install.
+                # Why don't we just raise here?
+                pass
+
+        self._set_req_to_reinstall(req_to_install)
+        return None
+
+    def _find_requirement_link(self, req: InstallRequirement) -> Optional[Link]:
+        upgrade = self._is_upgrade_allowed(req)
+        best_candidate = self.finder.find_requirement(req, upgrade)
+        if not best_candidate:
+            return None
+
+        # Log a warning per PEP 592 if necessary before returning.
+        link = best_candidate.link
+        if link.is_yanked:
+            reason = link.yanked_reason or "<none given>"
+            msg = (
+                "The candidate selected for download or install is a "
+                "yanked version: {candidate}\n"
+                "Reason for being yanked: {reason}"
+            ).format(candidate=best_candidate, reason=reason)
+            logger.warning(msg)
+
+        return link
+
+    def _populate_link(self, req: InstallRequirement) -> None:
+        """Ensure that if a link can be found for this, that it is found.
+
+        Note that req.link may still be None - if the requirement is already
+        installed and not needed to be upgraded based on the return value of
+        _is_upgrade_allowed().
+
+        If preparer.require_hashes is True, don't use the wheel cache, because
+        cached wheels, always built locally, have different hashes than the
+        files downloaded from the index server and thus throw false hash
+        mismatches. Furthermore, cached wheels at present have nondeterministic
+        contents due to file modification times.
+        """
+        if req.link is None:
+            req.link = self._find_requirement_link(req)
+
+        if self.wheel_cache is None or self.preparer.require_hashes:
+            return
+        cache_entry = self.wheel_cache.get_cache_entry(
+            link=req.link,
+            package_name=req.name,
+            supported_tags=get_supported(),
+        )
+        if cache_entry is not None:
+            logger.debug("Using cached wheel link: %s", cache_entry.link)
+            if req.link is req.original_link and cache_entry.persistent:
+                req.original_link_is_in_wheel_cache = True
+            if cache_entry.origin is not None:
+                req.download_info = cache_entry.origin
+            else:
+                # Legacy cache entry that does not have origin.json.
+                # download_info may miss the archive_info.hash field.
+                req.download_info = direct_url_from_link(
+                    req.link, link_is_in_wheel_cache=cache_entry.persistent
+                )
+            req.link = cache_entry.link
+
+    def _get_dist_for(self, req: InstallRequirement) -> BaseDistribution:
+        """Takes a InstallRequirement and returns a single AbstractDist \
+        representing a prepared variant of the same.
+        """
+        if req.editable:
+            return self.preparer.prepare_editable_requirement(req)
+
+        # satisfied_by is only evaluated by calling _check_skip_installed,
+        # so it must be None here.
+        assert req.satisfied_by is None
+        skip_reason = self._check_skip_installed(req)
+
+        if req.satisfied_by:
+            return self.preparer.prepare_installed_requirement(req, skip_reason)
+
+        # We eagerly populate the link, since that's our "legacy" behavior.
+        self._populate_link(req)
+        dist = self.preparer.prepare_linked_requirement(req)
+
+        # NOTE
+        # The following portion is for determining if a certain package is
+        # going to be re-installed/upgraded or not and reporting to the user.
+        # This should probably get cleaned up in a future refactor.
+
+        # req.req is only available after unpacking for URL packages, so
+        # repeat check_if_exists here to support uninstall-on-upgrade
+        # (#14)
+        if not self.ignore_installed:
+            req.check_if_exists(self.use_user_site)
+
+        if req.satisfied_by:
+            should_modify = (
+                self.upgrade_strategy != "to-satisfy-only"
+                or self.force_reinstall
+                or self.ignore_installed
+                or req.link.scheme == "file"
+            )
+            if should_modify:
+                self._set_req_to_reinstall(req)
+            else:
+                logger.info(
+                    "Requirement already satisfied (use --upgrade to upgrade): %s",
+                    req,
+                )
+        return dist
+
+    def _resolve_one(
+        self,
+        requirement_set: RequirementSet,
+        req_to_install: InstallRequirement,
+    ) -> List[InstallRequirement]:
+        """Prepare a single requirements file.
+
+        :return: A list of additional InstallRequirements to also install.
+        """
+        # Tell user what we are doing for this requirement:
+        # obtain (editable), skipping, processing (local url), collecting
+        # (remote url or package name)
+        if req_to_install.constraint or req_to_install.prepared:
+            return []
+
+        req_to_install.prepared = True
+
+        # Parse and return dependencies
+        dist = self._get_dist_for(req_to_install)
+        # This will raise UnsupportedPythonVersion if the given Python
+        # version isn't compatible with the distribution's Requires-Python.
+        _check_dist_requires_python(
+            dist,
+            version_info=self._py_version_info,
+            ignore_requires_python=self.ignore_requires_python,
+        )
+
+        more_reqs: List[InstallRequirement] = []
+
+        def add_req(subreq: Requirement, extras_requested: Iterable[str]) -> None:
+            # This idiosyncratically converts the Requirement to str and lets
+            # make_install_req then parse it again into a Requirement. But this is
+            # the legacy resolver so I'm just not going to bother refactoring.
+            sub_install_req = self._make_install_req(str(subreq), req_to_install)
+            parent_req_name = req_to_install.name
+            to_scan_again, add_to_parent = self._add_requirement_to_set(
+                requirement_set,
+                sub_install_req,
+                parent_req_name=parent_req_name,
+                extras_requested=extras_requested,
+            )
+            if parent_req_name and add_to_parent:
+                self._discovered_dependencies[parent_req_name].append(add_to_parent)
+            more_reqs.extend(to_scan_again)
+
+        with indent_log():
+            # We add req_to_install before its dependencies, so that we
+            # can refer to it when adding dependencies.
+            if not requirement_set.has_requirement(req_to_install.name):
+                # 'unnamed' requirements will get added here
+                # 'unnamed' requirements can only come from being directly
+                # provided by the user.
+                assert req_to_install.user_supplied
+                self._add_requirement_to_set(
+                    requirement_set, req_to_install, parent_req_name=None
+                )
+
+            if not self.ignore_dependencies:
+                if req_to_install.extras:
+                    logger.debug(
+                        "Installing extra requirements: %r",
+                        ",".join(req_to_install.extras),
+                    )
+                missing_requested = sorted(
+                    set(req_to_install.extras) - set(dist.iter_provided_extras())
+                )
+                for missing in missing_requested:
+                    logger.warning(
+                        "%s %s does not provide the extra '%s'",
+                        dist.raw_name,
+                        dist.version,
+                        missing,
+                    )
+
+                available_requested = sorted(
+                    set(dist.iter_provided_extras()) & set(req_to_install.extras)
+                )
+                for subreq in dist.iter_dependencies(available_requested):
+                    add_req(subreq, extras_requested=available_requested)
+
+        return more_reqs
+
+    def get_installation_order(
+        self, req_set: RequirementSet
+    ) -> List[InstallRequirement]:
+        """Create the installation order.
+
+        The installation order is topological - requirements are installed
+        before the requiring thing. We break cycles at an arbitrary point,
+        and make no other guarantees.
+        """
+        # The current implementation, which we may change at any point,
+        # installs the user-specified things in the order given, except when
+        # dependencies must come earlier to achieve topological order.
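+        # For example, if A depends on B and B depends on A, schedule(A)
+        # adds A to ordered_reqs before recursing, so the recursive
+        # schedule(B) emits B first and the revisit of A returns early,
+        # breaking the cycle at that point.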
+        order = []
+        ordered_reqs: Set[InstallRequirement] = set()
+
+        def schedule(req: InstallRequirement) -> None:
+            if req.satisfied_by or req in ordered_reqs:
+                return
+            if req.constraint:
+                return
+            ordered_reqs.add(req)
+            for dep in self._discovered_dependencies[req.name]:
+                schedule(dep)
+            order.append(req)
+
+        for install_req in req_set.requirements.values():
+            schedule(install_req)
+        return order
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__init__.py b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__init__.py
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..f087280
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-39.pyc
new file mode 100644
index 0000000..a51d174
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-39.pyc
new file mode 100644
index 0000000..84dafda
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-39.pyc
new file mode 100644
index 0000000..7b27f87
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-39.pyc
new file mode 100644
index 0000000..c35dcd1
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-39.pyc
new file mode 100644
index 0000000..b6acf6f
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-39.pyc
new file mode 100644
index 0000000..371bdcf
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-39.pyc
new file mode 100644
index 0000000..f0c3d92
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-39.pyc
new file mode 100644
index 0000000..c2842ec
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/base.py b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/base.py
new file mode 100644
index 0000000..b206692
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/base.py
@@ -0,0 +1,141 @@
+from typing import FrozenSet, Iterable, Optional, Tuple, Union
+
+from pip._vendor.packaging.specifiers import SpecifierSet
+from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
+from pip._vendor.packaging.version import LegacyVersion, Version
+
+from pip._internal.models.link import Link, links_equivalent
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.utils.hashes import Hashes
+
+CandidateLookup = Tuple[Optional["Candidate"], Optional[InstallRequirement]]
+CandidateVersion = Union[LegacyVersion, Version]
+
+
+def format_name(project: str, extras: FrozenSet[str]) -> str:
+    if not extras:
+        return project
+    canonical_extras = sorted(canonicalize_name(e) for e in extras)
+    return "{}[{}]".format(project, ",".join(canonical_extras))
+
+
+class Constraint:
+    def __init__(
+        self, specifier: SpecifierSet, hashes: Hashes, links: FrozenSet[Link]
+    ) -> None:
+        self.specifier = specifier
+        self.hashes = hashes
+        self.links = links
+
+    @classmethod
+    def empty(cls) -> "Constraint":
+        return Constraint(SpecifierSet(), Hashes(), frozenset())
+
+    @classmethod
+    def from_ireq(cls, ireq: InstallRequirement) -> "Constraint":
+        links = frozenset([ireq.link]) if ireq.link else frozenset()
+        return Constraint(ireq.specifier, ireq.hashes(trust_internet=False), links)
+
+    def __bool__(self) -> bool:
+        return bool(self.specifier) or bool(self.hashes) or bool(self.links)
+
+    def __and__(self, other: InstallRequirement) -> "Constraint":
+        if not isinstance(other, InstallRequirement):
+            return NotImplemented
+        specifier = self.specifier & other.specifier
+        hashes = self.hashes & other.hashes(trust_internet=False)
+        links = self.links
+        if other.link:
+            links = links.union([other.link])
+        return Constraint(specifier, hashes, links)
+
+    def is_satisfied_by(self, candidate: "Candidate") -> bool:
+        # Reject if there are any mismatched URL constraints on this package.
+        if self.links and not all(_match_link(link, candidate) for link in self.links):
+            return False
+        # We can safely always allow prereleases here since PackageFinder
+        # already implements the prerelease logic, and would have filtered out
+        # prerelease candidates if the user does not expect them.
+        return self.specifier.contains(candidate.version, prereleases=True)
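+
+    # Sketch of accumulation (hypothetical requirement lines): starting
+    # from Constraint.empty() and folding in "requests>=2.0" and then
+    # "requests<3" via "&" yields a Constraint whose specifier is
+    # ">=2.0,<3"; is_satisfied_by then filters candidates against it.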
+
+
+class Requirement:
+    @property
+    def project_name(self) -> NormalizedName:
+        """The "project name" of a requirement.
+
+        This is different from ``name`` if this requirement contains extras,
+        in which case ``name`` would contain the ``[...]`` part, while this
+        refers to the name of the project.
+        """
+        raise NotImplementedError("Subclass should override")
+
+    @property
+    def name(self) -> str:
+        """The name identifying this requirement in the resolver.
+
+        This is different from ``project_name`` if this requirement contains
+        extras, where ``project_name`` would not contain the ``[...]`` part.
+        """
+        raise NotImplementedError("Subclass should override")
+
+    def is_satisfied_by(self, candidate: "Candidate") -> bool:
+        return False
+
+    def get_candidate_lookup(self) -> CandidateLookup:
+        raise NotImplementedError("Subclass should override")
+
+    def format_for_error(self) -> str:
+        raise NotImplementedError("Subclass should override")
+
+
+def _match_link(link: Link, candidate: "Candidate") -> bool:
+    if candidate.source_link:
+        return links_equivalent(link, candidate.source_link)
+    return False
+
+
+class Candidate:
+    @property
+    def project_name(self) -> NormalizedName:
+        """The "project name" of the candidate.
+
+        This is different from ``name`` if this candidate contains extras,
+        in which case ``name`` would contain the ``[...]`` part, while this
+        refers to the name of the project.
+        """
+        raise NotImplementedError("Override in subclass")
+
+    @property
+    def name(self) -> str:
+        """The name identifying this candidate in the resolver.
+
+        This is different from ``project_name`` if this candidate contains
+        extras, where ``project_name`` would not contain the ``[...]`` part.
+        """
+        raise NotImplementedError("Override in subclass")
+
+    @property
+    def version(self) -> CandidateVersion:
+        raise NotImplementedError("Override in subclass")
+
+    @property
+    def is_installed(self) -> bool:
+        raise NotImplementedError("Override in subclass")
+
+    @property
+    def is_editable(self) -> bool:
+        raise NotImplementedError("Override in subclass")
+
+    @property
+    def source_link(self) -> Optional[Link]:
+        raise NotImplementedError("Override in subclass")
+
+    def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
+        raise NotImplementedError("Override in subclass")
+
+    def get_install_requirement(self) -> Optional[InstallRequirement]:
+        raise NotImplementedError("Override in subclass")
+
+    def format_for_error(self) -> str:
+        raise NotImplementedError("Subclass should override")
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/candidates.py b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/candidates.py
new file mode 100644
index 0000000..f5bc343
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/candidates.py
@@ -0,0 +1,556 @@
+import logging
+import sys
+from typing import TYPE_CHECKING, Any, FrozenSet, Iterable, Optional, Tuple, Union, cast
+
+from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
+from pip._vendor.packaging.version import Version
+
+from pip._internal.exceptions import (
+    HashError,
+    InstallationSubprocessError,
+    MetadataInconsistent,
+)
+from pip._internal.metadata import BaseDistribution
+from pip._internal.models.link import Link, links_equivalent
+from pip._internal.models.wheel import Wheel
+from pip._internal.req.constructors import (
+    install_req_from_editable,
+    install_req_from_line,
+)
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.utils.direct_url_helpers import direct_url_from_link
+from pip._internal.utils.misc import normalize_version_info
+
+from .base import Candidate, CandidateVersion, Requirement, format_name
+
+if TYPE_CHECKING:
+    from .factory import Factory
+
+logger = logging.getLogger(__name__)
+
+BaseCandidate = Union[
+    "AlreadyInstalledCandidate",
+    "EditableCandidate",
+    "LinkCandidate",
+]
+
+# Avoid conflicting with the PyPI package "Python".
+REQUIRES_PYTHON_IDENTIFIER = cast(NormalizedName, "<Python from Requires-Python>")
+
+
+def as_base_candidate(candidate: Candidate) -> Optional[BaseCandidate]:
+    """The runtime version of BaseCandidate."""
+    base_candidate_classes = (
+        AlreadyInstalledCandidate,
+        EditableCandidate,
+        LinkCandidate,
+    )
+    if isinstance(candidate, base_candidate_classes):
+        return candidate
+    return None
+
+
+def make_install_req_from_link(
+    link: Link, template: InstallRequirement
+) -> InstallRequirement:
+    assert not template.editable, "template is editable"
+    if template.req:
+        line = str(template.req)
+    else:
+        line = link.url
+    ireq = install_req_from_line(
+        line,
+        user_supplied=template.user_supplied,
+        comes_from=template.comes_from,
+        use_pep517=template.use_pep517,
+        isolated=template.isolated,
+        constraint=template.constraint,
+        options=dict(
+            install_options=template.install_options,
+            global_options=template.global_options,
+            hashes=template.hash_options,
+        ),
+        config_settings=template.config_settings,
+    )
+    ireq.original_link = template.original_link
+    ireq.link = link
+    return ireq
+
+
+def make_install_req_from_editable(
+    link: Link, template: InstallRequirement
+) -> InstallRequirement:
+    assert template.editable, "template not editable"
+    return install_req_from_editable(
+        link.url,
+        user_supplied=template.user_supplied,
+        comes_from=template.comes_from,
+        use_pep517=template.use_pep517,
+        isolated=template.isolated,
+        constraint=template.constraint,
+        permit_editable_wheels=template.permit_editable_wheels,
+        options=dict(
+            install_options=template.install_options,
+            global_options=template.global_options,
+            hashes=template.hash_options,
+        ),
+        config_settings=template.config_settings,
+    )
+
+
+def _make_install_req_from_dist(
+    dist: BaseDistribution, template: InstallRequirement
+) -> InstallRequirement:
+    if template.req:
+        line = str(template.req)
+    elif template.link:
+        line = f"{dist.canonical_name} @ {template.link.url}"
+    else:
+        line = f"{dist.canonical_name}=={dist.version}"
+    ireq = install_req_from_line(
+        line,
+        user_supplied=template.user_supplied,
+        comes_from=template.comes_from,
+        use_pep517=template.use_pep517,
+        isolated=template.isolated,
+        constraint=template.constraint,
+        options=dict(
+            install_options=template.install_options,
+            global_options=template.global_options,
+            hashes=template.hash_options,
+        ),
+        config_settings=template.config_settings,
+    )
+    ireq.satisfied_by = dist
+    return ireq
+
+
+class _InstallRequirementBackedCandidate(Candidate):
+    """A candidate backed by an ``InstallRequirement``.
+
+    This represents a package request with the target not being already
+    in the environment, and needs to be fetched and installed. The backing
+    ``InstallRequirement`` is responsible for most of the leg work; this
+    class exposes appropriate information to the resolver.
+
+    :param link: The link passed to the ``InstallRequirement``. The backing
+        ``InstallRequirement`` will use this link to fetch the distribution.
+    :param source_link: The link this candidate "originates" from. This is
+        different from ``link`` when the link is found in the wheel cache.
+        ``link`` would point to the wheel cache, while this points to the
+        found remote link (e.g. from pypi.org).
+    """
+
+    dist: BaseDistribution
+    is_installed = False
+
+    def __init__(
+        self,
+        link: Link,
+        source_link: Link,
+        ireq: InstallRequirement,
+        factory: "Factory",
+        name: Optional[NormalizedName] = None,
+        version: Optional[CandidateVersion] = None,
+    ) -> None:
+        self._link = link
+        self._source_link = source_link
+        self._factory = factory
+        self._ireq = ireq
+        self._name = name
+        self._version = version
+        self.dist = self._prepare()
+
+    def __str__(self) -> str:
+        return f"{self.name} {self.version}"
+
+    def __repr__(self) -> str:
+        return "{class_name}({link!r})".format(
+            class_name=self.__class__.__name__,
+            link=str(self._link),
+        )
+
+    def __hash__(self) -> int:
+        return hash((self.__class__, self._link))
+
+    def __eq__(self, other: Any) -> bool:
+        if isinstance(other, self.__class__):
+            return links_equivalent(self._link, other._link)
+        return False
+
+    @property
+    def source_link(self) -> Optional[Link]:
+        return self._source_link
+
+    @property
+    def project_name(self) -> NormalizedName:
+        """The normalised name of the project the candidate refers to"""
+        if self._name is None:
+            self._name = self.dist.canonical_name
+        return self._name
+
+    @property
+    def name(self) -> str:
+        return self.project_name
+
+    @property
+    def version(self) -> CandidateVersion:
+        if self._version is None:
+            self._version = self.dist.version
+        return self._version
+
+    def format_for_error(self) -> str:
+        return "{} {} (from {})".format(
+            self.name,
+            self.version,
+            self._link.file_path if self._link.is_file else self._link,
+        )
+
+    def _prepare_distribution(self) -> BaseDistribution:
+        raise NotImplementedError("Override in subclass")
+
+    def _check_metadata_consistency(self, dist: BaseDistribution) -> None:
+        """Check for consistency of project name and version of dist."""
+        if self._name is not None and self._name != dist.canonical_name:
+            raise MetadataInconsistent(
+                self._ireq,
+                "name",
+                self._name,
+                dist.canonical_name,
+            )
+        if self._version is not None and self._version != dist.version:
+            raise MetadataInconsistent(
+                self._ireq,
+                "version",
+                str(self._version),
+                str(dist.version),
+            )
+
+    def _prepare(self) -> BaseDistribution:
+        try:
+            dist = self._prepare_distribution()
+        except HashError as e:
+            # Provide HashError the underlying ireq that caused it. This
+            # provides context for the resulting error message to show the
+            # offending line to the user.
+            e.req = self._ireq
+            raise
+        except InstallationSubprocessError as exc:
+            # The output has been presented already, so don't duplicate it.
+            exc.context = "See above for output."
+            raise
+
+        self._check_metadata_consistency(dist)
+        return dist
+
+    def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
+        requires = self.dist.iter_dependencies() if with_requires else ()
+        for r in requires:
+            yield self._factory.make_requirement_from_spec(str(r), self._ireq)
+        yield self._factory.make_requires_python_requirement(self.dist.requires_python)
+
+    def get_install_requirement(self) -> Optional[InstallRequirement]:
+        return self._ireq
+
+
+class LinkCandidate(_InstallRequirementBackedCandidate):
+    is_editable = False
+
+    def __init__(
+        self,
+        link: Link,
+        template: InstallRequirement,
+        factory: "Factory",
+        name: Optional[NormalizedName] = None,
+        version: Optional[CandidateVersion] = None,
+    ) -> None:
+        source_link = link
+        cache_entry = factory.get_wheel_cache_entry(link, name)
+        if cache_entry is not None:
+            logger.debug("Using cached wheel link: %s", cache_entry.link)
+            link = cache_entry.link
+        ireq = make_install_req_from_link(link, template)
+        assert ireq.link == link
+        if ireq.link.is_wheel and not ireq.link.is_file:
+            wheel = Wheel(ireq.link.filename)
+            wheel_name = canonicalize_name(wheel.name)
+            assert name == wheel_name, f"{name!r} != {wheel_name!r} for wheel"
+            # Version may not be present for PEP 508 direct URLs
+            if version is not None:
+                wheel_version = Version(wheel.version)
+                assert version == wheel_version, "{!r} != {!r} for wheel {}".format(
+                    version, wheel_version, name
+                )
+
+        if cache_entry is not None:
+            if cache_entry.persistent and template.link is template.original_link:
+                ireq.original_link_is_in_wheel_cache = True
+            if cache_entry.origin is not None:
+                ireq.download_info = cache_entry.origin
+            else:
+                # Legacy cache entry that does not have origin.json.
+                # download_info may miss the archive_info.hash field.
+                ireq.download_info = direct_url_from_link(
+                    source_link, link_is_in_wheel_cache=cache_entry.persistent
+                )
+
+        super().__init__(
+            link=link,
+            source_link=source_link,
+            ireq=ireq,
+            factory=factory,
+            name=name,
+            version=version,
+        )
+
+    def _prepare_distribution(self) -> BaseDistribution:
+        preparer = self._factory.preparer
+        return preparer.prepare_linked_requirement(self._ireq, parallel_builds=True)
+
+
+class EditableCandidate(_InstallRequirementBackedCandidate):
+    is_editable = True
+
+    def __init__(
+        self,
+        link: Link,
+        template: InstallRequirement,
+        factory: "Factory",
+        name: Optional[NormalizedName] = None,
+        version: Optional[CandidateVersion] = None,
+    ) -> None:
+        super().__init__(
+            link=link,
+            source_link=link,
+            ireq=make_install_req_from_editable(link, template),
+            factory=factory,
+            name=name,
+            version=version,
+        )
+
+    def _prepare_distribution(self) -> BaseDistribution:
+        return self._factory.preparer.prepare_editable_requirement(self._ireq)
+
+
+class AlreadyInstalledCandidate(Candidate):
+    is_installed = True
+    source_link = None
+
+    def __init__(
+        self,
+        dist: BaseDistribution,
+        template: InstallRequirement,
+        factory: "Factory",
+    ) -> None:
+        self.dist = dist
+        self._ireq = _make_install_req_from_dist(dist, template)
+        self._factory = factory
+
+        # This is just logging some messages, so we can do it eagerly.
+        # The returned dist would be exactly the same as self.dist because we
+        # set satisfied_by in _make_install_req_from_dist.
+        # TODO: Supply reason based on force_reinstall and upgrade_strategy.
+        skip_reason = "already satisfied"
+        factory.preparer.prepare_installed_requirement(self._ireq, skip_reason)
+
+    def __str__(self) -> str:
+        return str(self.dist)
+
+    def __repr__(self) -> str:
+        return "{class_name}({distribution!r})".format(
+            class_name=self.__class__.__name__,
+            distribution=self.dist,
+        )
+
+    def __hash__(self) -> int:
+        return hash((self.__class__, self.name, self.version))
+
+    def __eq__(self, other: Any) -> bool:
+        if isinstance(other, self.__class__):
+            return self.name == other.name and self.version == other.version
+        return False
+
+    @property
+    def project_name(self) -> NormalizedName:
+        return self.dist.canonical_name
+
+    @property
+    def name(self) -> str:
+        return self.project_name
+
+    @property
+    def version(self) -> CandidateVersion:
+        return self.dist.version
+
+    @property
+    def is_editable(self) -> bool:
+        return self.dist.editable
+
+    def format_for_error(self) -> str:
+        return f"{self.name} {self.version} (Installed)"
+
+    def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
+        if not with_requires:
+            return
+        for r in self.dist.iter_dependencies():
+            yield self._factory.make_requirement_from_spec(str(r), self._ireq)
+
+    def get_install_requirement(self) -> Optional[InstallRequirement]:
+        return None
+
+
+class ExtrasCandidate(Candidate):
+    """A candidate that has 'extras', indicating additional dependencies.
+
+    Requirements can be for a project with dependencies, something like
+    foo[extra].  The extras don't affect the project/version being installed
+    directly, but indicate that we need additional dependencies. We model that
+    by having an artificial ExtrasCandidate that wraps the "base" candidate.
+
+    The ExtrasCandidate differs from the base in the following ways:
+
+    1. It has a unique name, of the form foo[extra]. This causes the resolver
+       to treat it as a separate node in the dependency graph.
+    2. When we're getting the candidate's dependencies,
+       a) We specify that we want the extra dependencies as well.
+       b) We add a dependency on the base candidate.
+          See below for why this is needed.
+    3. We return None for the underlying InstallRequirement, as the base
+       candidate will provide it, and we don't want to end up with duplicates.
+
+    The dependency on the base candidate is needed so that the resolver can't
+    decide that it should recommend foo[extra1] version 1.0 and foo[extra2]
+    version 2.0. Having those candidates depend on foo=1.0 and foo=2.0
+    respectively forces the resolver to recognise that this is a conflict.
+    """
+
+    def __init__(
+        self,
+        base: BaseCandidate,
+        extras: FrozenSet[str],
+    ) -> None:
+        self.base = base
+        self.extras = extras
+
+    def __str__(self) -> str:
+        name, rest = str(self.base).split(" ", 1)
+        return "{}[{}] {}".format(name, ",".join(self.extras), rest)
+
+    def __repr__(self) -> str:
+        return "{class_name}(base={base!r}, extras={extras!r})".format(
+            class_name=self.__class__.__name__,
+            base=self.base,
+            extras=self.extras,
+        )
+
+    def __hash__(self) -> int:
+        return hash((self.base, self.extras))
+
+    def __eq__(self, other: Any) -> bool:
+        if isinstance(other, self.__class__):
+            return self.base == other.base and self.extras == other.extras
+        return False
+
+    @property
+    def project_name(self) -> NormalizedName:
+        return self.base.project_name
+
+    @property
+    def name(self) -> str:
+        """The normalised name of the project the candidate refers to"""
+        return format_name(self.base.project_name, self.extras)
+
+    @property
+    def version(self) -> CandidateVersion:
+        return self.base.version
+
+    def format_for_error(self) -> str:
+        return "{} [{}]".format(
+            self.base.format_for_error(), ", ".join(sorted(self.extras))
+        )
+
+    @property
+    def is_installed(self) -> bool:
+        return self.base.is_installed
+
+    @property
+    def is_editable(self) -> bool:
+        return self.base.is_editable
+
+    @property
+    def source_link(self) -> Optional[Link]:
+        return self.base.source_link
+
+    def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
+        factory = self.base._factory
+
+        # Add a dependency on the exact base
+        # (See note 2b in the class docstring)
+        yield factory.make_requirement_from_candidate(self.base)
+        if not with_requires:
+            return
+
+        # The user may have specified extras that the candidate doesn't
+        # support. We ignore any unsupported extras here.
+        valid_extras = self.extras.intersection(self.base.dist.iter_provided_extras())
+        invalid_extras = self.extras.difference(self.base.dist.iter_provided_extras())
+        for extra in sorted(invalid_extras):
+            logger.warning(
+                "%s %s does not provide the extra '%s'",
+                self.base.name,
+                self.version,
+                extra,
+            )
+
+        for r in self.base.dist.iter_dependencies(valid_extras):
+            requirement = factory.make_requirement_from_spec(
+                str(r), self.base._ireq, valid_extras
+            )
+            if requirement:
+                yield requirement
+
+    def get_install_requirement(self) -> Optional[InstallRequirement]:
+        # We don't return anything here, because we always
+        # depend on the base candidate, and we'll get the
+        # install requirement from that.
+        return None
+
+
+class RequiresPythonCandidate(Candidate):
+    is_installed = False
+    source_link = None
+
+    def __init__(self, py_version_info: Optional[Tuple[int, ...]]) -> None:
+        if py_version_info is not None:
+            version_info = normalize_version_info(py_version_info)
+        else:
+            version_info = sys.version_info[:3]
+        self._version = Version(".".join(str(c) for c in version_info))
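+        # e.g. RequiresPythonCandidate((3, 9)) normalizes to (3, 9, 0) and
+        # reports itself as "Python 3.9.0".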
+
+    # We don't need to implement __eq__() and __ne__() since there is always
+    # only one RequiresPythonCandidate in a resolution, i.e. the host Python.
+    # The built-in object.__eq__() and object.__ne__() do exactly what we want.
+
+    def __str__(self) -> str:
+        return f"Python {self._version}"
+
+    @property
+    def project_name(self) -> NormalizedName:
+        return REQUIRES_PYTHON_IDENTIFIER
+
+    @property
+    def name(self) -> str:
+        return REQUIRES_PYTHON_IDENTIFIER
+
+    @property
+    def version(self) -> CandidateVersion:
+        return self._version
+
+    def format_for_error(self) -> str:
+        return f"Python {self.version}"
+
+    def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
+        return ()
+
+    def get_install_requirement(self) -> Optional[InstallRequirement]:
+        return None
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/factory.py b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/factory.py
new file mode 100644
index 0000000..a4c24b5
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/factory.py
@@ -0,0 +1,731 @@
+import contextlib
+import functools
+import logging
+from typing import (
+    TYPE_CHECKING,
+    Dict,
+    FrozenSet,
+    Iterable,
+    Iterator,
+    List,
+    Mapping,
+    NamedTuple,
+    Optional,
+    Sequence,
+    Set,
+    Tuple,
+    TypeVar,
+    cast,
+)
+
+from pip._vendor.packaging.requirements import InvalidRequirement
+from pip._vendor.packaging.specifiers import SpecifierSet
+from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
+from pip._vendor.resolvelib import ResolutionImpossible
+
+from pip._internal.cache import CacheEntry, WheelCache
+from pip._internal.exceptions import (
+    DistributionNotFound,
+    InstallationError,
+    MetadataInconsistent,
+    UnsupportedPythonVersion,
+    UnsupportedWheel,
+)
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import BaseDistribution, get_default_environment
+from pip._internal.models.link import Link
+from pip._internal.models.wheel import Wheel
+from pip._internal.operations.prepare import RequirementPreparer
+from pip._internal.req.constructors import install_req_from_link_and_ireq
+from pip._internal.req.req_install import (
+    InstallRequirement,
+    check_invalid_constraint_type,
+)
+from pip._internal.resolution.base import InstallRequirementProvider
+from pip._internal.utils.compatibility_tags import get_supported
+from pip._internal.utils.hashes import Hashes
+from pip._internal.utils.packaging import get_requirement
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+from .base import Candidate, CandidateVersion, Constraint, Requirement
+from .candidates import (
+    AlreadyInstalledCandidate,
+    BaseCandidate,
+    EditableCandidate,
+    ExtrasCandidate,
+    LinkCandidate,
+    RequiresPythonCandidate,
+    as_base_candidate,
+)
+from .found_candidates import FoundCandidates, IndexCandidateInfo
+from .requirements import (
+    ExplicitRequirement,
+    RequiresPythonRequirement,
+    SpecifierRequirement,
+    UnsatisfiableRequirement,
+)
+
+if TYPE_CHECKING:
+    from typing import Protocol
+
+    class ConflictCause(Protocol):
+        requirement: RequiresPythonRequirement
+        parent: Candidate
+
+
+logger = logging.getLogger(__name__)
+
+C = TypeVar("C")
+Cache = Dict[Link, C]
+
+
+class CollectedRootRequirements(NamedTuple):
+    requirements: List[Requirement]
+    constraints: Dict[str, Constraint]
+    user_requested: Dict[str, int]
+
+
+class Factory:
+    def __init__(
+        self,
+        finder: PackageFinder,
+        preparer: RequirementPreparer,
+        make_install_req: InstallRequirementProvider,
+        wheel_cache: Optional[WheelCache],
+        use_user_site: bool,
+        force_reinstall: bool,
+        ignore_installed: bool,
+        ignore_requires_python: bool,
+        py_version_info: Optional[Tuple[int, ...]] = None,
+    ) -> None:
+        self._finder = finder
+        self.preparer = preparer
+        self._wheel_cache = wheel_cache
+        self._python_candidate = RequiresPythonCandidate(py_version_info)
+        self._make_install_req_from_spec = make_install_req
+        self._use_user_site = use_user_site
+        self._force_reinstall = force_reinstall
+        self._ignore_requires_python = ignore_requires_python
+
+        self._build_failures: Cache[InstallationError] = {}
+        self._link_candidate_cache: Cache[LinkCandidate] = {}
+        self._editable_candidate_cache: Cache[EditableCandidate] = {}
+        self._installed_candidate_cache: Dict[str, AlreadyInstalledCandidate] = {}
+        self._extras_candidate_cache: Dict[
+            Tuple[int, FrozenSet[str]], ExtrasCandidate
+        ] = {}
+
+        if not ignore_installed:
+            env = get_default_environment()
+            self._installed_dists = {
+                dist.canonical_name: dist
+                for dist in env.iter_installed_distributions(local_only=False)
+            }
+        else:
+            self._installed_dists = {}
+
+    @property
+    def force_reinstall(self) -> bool:
+        return self._force_reinstall
+
+    def _fail_if_link_is_unsupported_wheel(self, link: Link) -> None:
+        if not link.is_wheel:
+            return
+        wheel = Wheel(link.filename)
+        if wheel.supported(self._finder.target_python.get_tags()):
+            return
+        msg = f"{link.filename} is not a supported wheel on this platform."
+        raise UnsupportedWheel(msg)
+
+    def _make_extras_candidate(
+        self, base: BaseCandidate, extras: FrozenSet[str]
+    ) -> ExtrasCandidate:
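+        # Keyed on the identity of the base object (not the project name), so
+        # distinct base candidates never share an ExtrasCandidate.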
+        cache_key = (id(base), extras)
+        try:
+            candidate = self._extras_candidate_cache[cache_key]
+        except KeyError:
+            candidate = ExtrasCandidate(base, extras)
+            self._extras_candidate_cache[cache_key] = candidate
+        return candidate
+
+    def _make_candidate_from_dist(
+        self,
+        dist: BaseDistribution,
+        extras: FrozenSet[str],
+        template: InstallRequirement,
+    ) -> Candidate:
+        try:
+            base = self._installed_candidate_cache[dist.canonical_name]
+        except KeyError:
+            base = AlreadyInstalledCandidate(dist, template, factory=self)
+            self._installed_candidate_cache[dist.canonical_name] = base
+        if not extras:
+            return base
+        return self._make_extras_candidate(base, extras)
+
+    def _make_candidate_from_link(
+        self,
+        link: Link,
+        extras: FrozenSet[str],
+        template: InstallRequirement,
+        name: Optional[NormalizedName],
+        version: Optional[CandidateVersion],
+    ) -> Optional[Candidate]:
+        # TODO: Check already installed candidate, and use it if the link and
+        # editable flag match.
+
+        if link in self._build_failures:
+            # We already tried this candidate before, and it does not build.
+            # Don't bother trying again.
+            return None
+
+        if template.editable:
+            if link not in self._editable_candidate_cache:
+                try:
+                    self._editable_candidate_cache[link] = EditableCandidate(
+                        link,
+                        template,
+                        factory=self,
+                        name=name,
+                        version=version,
+                    )
+                except MetadataInconsistent as e:
+                    logger.info(
+                        "Discarding [blue underline]%s[/]: [yellow]%s[reset]",
+                        link,
+                        e,
+                        extra={"markup": True},
+                    )
+                    self._build_failures[link] = e
+                    return None
+
+            base: BaseCandidate = self._editable_candidate_cache[link]
+        else:
+            if link not in self._link_candidate_cache:
+                try:
+                    self._link_candidate_cache[link] = LinkCandidate(
+                        link,
+                        template,
+                        factory=self,
+                        name=name,
+                        version=version,
+                    )
+                except MetadataInconsistent as e:
+                    logger.info(
+                        "Discarding [blue underline]%s[/]: [yellow]%s[reset]",
+                        link,
+                        e,
+                        extra={"markup": True},
+                    )
+                    self._build_failures[link] = e
+                    return None
+            base = self._link_candidate_cache[link]
+
+        if not extras:
+            return base
+        return self._make_extras_candidate(base, extras)
+
+    def _iter_found_candidates(
+        self,
+        ireqs: Sequence[InstallRequirement],
+        specifier: SpecifierSet,
+        hashes: Hashes,
+        prefers_installed: bool,
+        incompatible_ids: Set[int],
+    ) -> Iterable[Candidate]:
+        if not ireqs:
+            return ()
+
+        # The InstallRequirement implementation requires us to give it a
+        # "template". Here we just choose the first requirement to represent
+        # all of them.
+        # Hopefully the Project model can correct this mismatch in the future.
+        template = ireqs[0]
+        assert template.req, "Candidates found on index must be PEP 508"
+        name = canonicalize_name(template.req.name)
+
+        extras: FrozenSet[str] = frozenset()
+        for ireq in ireqs:
+            assert ireq.req, "Candidates found on index must be PEP 508"
+            specifier &= ireq.req.specifier
+            hashes &= ireq.hashes(trust_internet=False)
+            extras |= frozenset(ireq.extras)
+
+        def _get_installed_candidate() -> Optional[Candidate]:
+            """Get the candidate for the currently-installed version."""
+            # If --force-reinstall is set, we want the version from the index
+            # instead, so we "pretend" there is nothing installed.
+            if self._force_reinstall:
+                return None
+            try:
+                installed_dist = self._installed_dists[name]
+            except KeyError:
+                return None
+            # Don't use the installed distribution if its version does not fit
+            # the current dependency graph.
+            if not specifier.contains(installed_dist.version, prereleases=True):
+                return None
+            candidate = self._make_candidate_from_dist(
+                dist=installed_dist,
+                extras=extras,
+                template=template,
+            )
+            # The candidate is a known incompatibility. Don't use it.
+            if id(candidate) in incompatible_ids:
+                return None
+            return candidate
+
+        def iter_index_candidate_infos() -> Iterator[IndexCandidateInfo]:
+            result = self._finder.find_best_candidate(
+                project_name=name,
+                specifier=specifier,
+                hashes=hashes,
+            )
+            icans = list(result.iter_applicable())
+
+            # PEP 592: Yanked releases are ignored unless the specifier
+            # explicitly pins a version (via '==' or '===') that can be
+            # solely satisfied by a yanked release.
+            all_yanked = all(ican.link.is_yanked for ican in icans)
+
+            def is_pinned(specifier: SpecifierSet) -> bool:
+                for sp in specifier:
+                    if sp.operator == "===":
+                        return True
+                    if sp.operator != "==":
+                        continue
+                    if sp.version.endswith(".*"):
+                        continue
+                    return True
+                return False
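+            # Doctest-style sketch of is_pinned (not executed here):
+            #   >>> is_pinned(SpecifierSet("==1.0"))   # exact pin
+            #   True
+            #   >>> is_pinned(SpecifierSet("==1.*"))   # wildcard: not a pin
+            #   False
+            #   >>> is_pinned(SpecifierSet(">=1.0"))
+            #   False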
+
+            pinned = is_pinned(specifier)
+
+            # PackageFinder returns earlier versions first, so we reverse.
+            for ican in reversed(icans):
+                if not (all_yanked and pinned) and ican.link.is_yanked:
+                    continue
+                func = functools.partial(
+                    self._make_candidate_from_link,
+                    link=ican.link,
+                    extras=extras,
+                    template=template,
+                    name=name,
+                    version=ican.version,
+                )
+                yield ican.version, func
+
+        return FoundCandidates(
+            iter_index_candidate_infos,
+            _get_installed_candidate(),
+            prefers_installed,
+            incompatible_ids,
+        )
+
+    def _iter_explicit_candidates_from_base(
+        self,
+        base_requirements: Iterable[Requirement],
+        extras: FrozenSet[str],
+    ) -> Iterator[Candidate]:
+        """Produce explicit candidates from the base given an extra-ed package.
+
+        :param base_requirements: Requirements known to the resolver. The
+            requirements are guaranteed to not have extras.
+        :param extras: The extras to inject into the explicit requirements'
+            candidates.
+        """
+        for req in base_requirements:
+            lookup_cand, _ = req.get_candidate_lookup()
+            if lookup_cand is None:  # Not explicit.
+                continue
+            # We've stripped extras from the identifier, and should always
+            # get a BaseCandidate here, unless there's a bug elsewhere.
+            base_cand = as_base_candidate(lookup_cand)
+            assert base_cand is not None, "no extras here"
+            yield self._make_extras_candidate(base_cand, extras)
+
+    def _iter_candidates_from_constraints(
+        self,
+        identifier: str,
+        constraint: Constraint,
+        template: InstallRequirement,
+    ) -> Iterator[Candidate]:
+        """Produce explicit candidates from constraints.
+
+        This creates "fake" InstallRequirement objects that are basically clones
+        of what "should" be the template, but with original_link set to link.
+        """
+        for link in constraint.links:
+            self._fail_if_link_is_unsupported_wheel(link)
+            candidate = self._make_candidate_from_link(
+                link,
+                extras=frozenset(),
+                template=install_req_from_link_and_ireq(link, template),
+                name=canonicalize_name(identifier),
+                version=None,
+            )
+            if candidate:
+                yield candidate
+
+    def find_candidates(
+        self,
+        identifier: str,
+        requirements: Mapping[str, Iterable[Requirement]],
+        incompatibilities: Mapping[str, Iterator[Candidate]],
+        constraint: Constraint,
+        prefers_installed: bool,
+    ) -> Iterable[Candidate]:
+        # Collect basic lookup information from the requirements.
+        explicit_candidates: Set[Candidate] = set()
+        ireqs: List[InstallRequirement] = []
+        for req in requirements[identifier]:
+            cand, ireq = req.get_candidate_lookup()
+            if cand is not None:
+                explicit_candidates.add(cand)
+            if ireq is not None:
+                ireqs.append(ireq)
+
+        # If the current identifier contains extras, also add explicit
+        # candidates found for its extra-less counterpart.
+        with contextlib.suppress(InvalidRequirement):
+            parsed_requirement = get_requirement(identifier)
+            explicit_candidates.update(
+                self._iter_explicit_candidates_from_base(
+                    requirements.get(parsed_requirement.name, ()),
+                    frozenset(parsed_requirement.extras),
+                ),
+            )
+
+        # Add explicit candidates from constraints. We only do this if there are
+        # known ireqs, which represent requirements not already explicit. If
+        # there are no ireqs, we're constraining already-explicit requirements,
+        # which is handled later when we return the explicit candidates.
+        if ireqs:
+            try:
+                explicit_candidates.update(
+                    self._iter_candidates_from_constraints(
+                        identifier,
+                        constraint,
+                        template=ireqs[0],
+                    ),
+                )
+            except UnsupportedWheel:
+                # If we're constrained to install a wheel incompatible with the
+                # target architecture, no candidates will ever be valid.
+                return ()
+
+        # Since we cache all the candidates, incompatibility identification
+        # can be made quicker by comparing only the id() values.
+        incompat_ids = {id(c) for c in incompatibilities.get(identifier, ())}
+
+        # If none of the requirements want an explicit candidate, we can ask
+        # the finder for candidates.
+        if not explicit_candidates:
+            return self._iter_found_candidates(
+                ireqs,
+                constraint.specifier,
+                constraint.hashes,
+                prefers_installed,
+                incompat_ids,
+            )
+
+        return (
+            c
+            for c in explicit_candidates
+            if id(c) not in incompat_ids
+            and constraint.is_satisfied_by(c)
+            and all(req.is_satisfied_by(c) for req in requirements[identifier])
+        )
+
+    def _make_requirement_from_install_req(
+        self, ireq: InstallRequirement, requested_extras: Iterable[str]
+    ) -> Optional[Requirement]:
+        if not ireq.match_markers(requested_extras):
+            logger.info(
+                "Ignoring %s: markers '%s' don't match your environment",
+                ireq.name,
+                ireq.markers,
+            )
+            return None
+        if not ireq.link:
+            return SpecifierRequirement(ireq)
+        self._fail_if_link_is_unsupported_wheel(ireq.link)
+        cand = self._make_candidate_from_link(
+            ireq.link,
+            extras=frozenset(ireq.extras),
+            template=ireq,
+            name=canonicalize_name(ireq.name) if ireq.name else None,
+            version=None,
+        )
+        if cand is None:
+            # There's no way we can satisfy a URL requirement if the underlying
+            # candidate fails to build. An unnamed URL must be user-supplied, so
+            # we fail eagerly. If the URL is named, an unsatisfiable requirement
+            # can make the resolver do the right thing, either backtrack (and
+            # maybe find some other requirement that's buildable) or raise a
+            # ResolutionImpossible eventually.
+            if not ireq.name:
+                raise self._build_failures[ireq.link]
+            return UnsatisfiableRequirement(canonicalize_name(ireq.name))
+        return self.make_requirement_from_candidate(cand)
+
+    def collect_root_requirements(
+        self, root_ireqs: List[InstallRequirement]
+    ) -> CollectedRootRequirements:
+        collected = CollectedRootRequirements([], {}, {})
+        for i, ireq in enumerate(root_ireqs):
+            if ireq.constraint:
+                # Ensure we only accept valid constraints
+                problem = check_invalid_constraint_type(ireq)
+                if problem:
+                    raise InstallationError(problem)
+                if not ireq.match_markers():
+                    continue
+                assert ireq.name, "Constraint must be named"
+                name = canonicalize_name(ireq.name)
+                if name in collected.constraints:
+                    collected.constraints[name] &= ireq
+                else:
+                    collected.constraints[name] = Constraint.from_ireq(ireq)
+            else:
+                req = self._make_requirement_from_install_req(
+                    ireq,
+                    requested_extras=(),
+                )
+                if req is None:
+                    continue
+                if ireq.user_supplied and req.name not in collected.user_requested:
+                    collected.user_requested[req.name] = i
+                collected.requirements.append(req)
+        return collected
+
+    def make_requirement_from_candidate(
+        self, candidate: Candidate
+    ) -> ExplicitRequirement:
+        return ExplicitRequirement(candidate)
+
+    def make_requirement_from_spec(
+        self,
+        specifier: str,
+        comes_from: Optional[InstallRequirement],
+        requested_extras: Iterable[str] = (),
+    ) -> Optional[Requirement]:
+        ireq = self._make_install_req_from_spec(specifier, comes_from)
+        return self._make_requirement_from_install_req(ireq, requested_extras)
+
+    def make_requires_python_requirement(
+        self,
+        specifier: SpecifierSet,
+    ) -> Optional[Requirement]:
+        if self._ignore_requires_python:
+            return None
+        # Don't bother creating a dependency for an empty Requires-Python.
+        if not str(specifier):
+            return None
+        return RequiresPythonRequirement(specifier, self._python_candidate)
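+    # e.g. a distribution declaring `Requires-Python: >=3.7` yields a
+    # RequiresPythonRequirement checked against the single
+    # RequiresPythonCandidate representing the target interpreter.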
+
+    def get_wheel_cache_entry(
+        self, link: Link, name: Optional[str]
+    ) -> Optional[CacheEntry]:
+        """Look up the link in the wheel cache.
+
+        If ``preparer.require_hashes`` is True, don't use the wheel cache,
+        because cached wheels, always built locally, have different hashes
+        than the files downloaded from the index server and thus throw false
+        hash mismatches. Furthermore, cached wheels at present have
+        nondeterministic contents due to file modification times.
+        """
+        if self._wheel_cache is None or self.preparer.require_hashes:
+            return None
+        return self._wheel_cache.get_cache_entry(
+            link=link,
+            package_name=name,
+            supported_tags=get_supported(),
+        )
+
+    def get_dist_to_uninstall(self, candidate: Candidate) -> Optional[BaseDistribution]:
+        # TODO: Are there more cases this needs to return True? Editable?
+        dist = self._installed_dists.get(candidate.project_name)
+        if dist is None:  # Not installed, no uninstallation required.
+            return None
+
+        # We're installing into the global site. The current installation must
+        # be uninstalled, whether it's in the global or user site, because a
+        # user site installation takes precedence over the global one.
+        if not self._use_user_site:
+            return dist
+
+        # We're installing into user site. Remove the user site installation.
+        if dist.in_usersite:
+            return dist
+
+        # We're installing into the user site, but the installed incompatible
+        # package is in the global site. We can't uninstall that, and the new
+        # user installation would merely "shadow" it. But shadowing won't work
+        # in virtual environments, so we error out.
+        if running_under_virtualenv() and dist.in_site_packages:
+            message = (
+                f"Will not install to the user site because it will lack "
+                f"sys.path precedence to {dist.raw_name} in {dist.location}"
+            )
+            raise InstallationError(message)
+        return None
+
+    def _report_requires_python_error(
+        self, causes: Sequence["ConflictCause"]
+    ) -> UnsupportedPythonVersion:
+        assert causes, "Requires-Python error reported with no cause"
+
+        version = self._python_candidate.version
+
+        if len(causes) == 1:
+            specifier = str(causes[0].requirement.specifier)
+            message = (
+                f"Package {causes[0].parent.name!r} requires a different "
+                f"Python: {version} not in {specifier!r}"
+            )
+            return UnsupportedPythonVersion(message)
+
+        message = f"Packages require a different Python. {version} not in:"
+        for cause in causes:
+            package = cause.parent.format_for_error()
+            specifier = str(cause.requirement.specifier)
+            message += f"\n{specifier!r} (required by {package})"
+        return UnsupportedPythonVersion(message)
+
+    def _report_single_requirement_conflict(
+        self, req: Requirement, parent: Optional[Candidate]
+    ) -> DistributionNotFound:
+        if parent is None:
+            req_disp = str(req)
+        else:
+            req_disp = f"{req} (from {parent.name})"
+
+        cands = self._finder.find_all_candidates(req.project_name)
+        skipped_by_requires_python = self._finder.requires_python_skipped_reasons()
+        versions = [str(v) for v in sorted({c.version for c in cands})]
+
+        if skipped_by_requires_python:
+            logger.critical(
+                "Ignored the following versions that require a different python "
+                "version: %s",
+                "; ".join(skipped_by_requires_python) or "none",
+            )
+        logger.critical(
+            "Could not find a version that satisfies the requirement %s "
+            "(from versions: %s)",
+            req_disp,
+            ", ".join(versions) or "none",
+        )
+        if str(req) == "requirements.txt":
+            logger.info(
+                "HINT: You are attempting to install a package literally "
+                'named "requirements.txt" (which cannot exist). Consider '
+                "using the '-r' flag to install the packages listed in "
+                "requirements.txt"
+            )
+
+        return DistributionNotFound(f"No matching distribution found for {req}")
+
+    def get_installation_error(
+        self,
+        e: "ResolutionImpossible[Requirement, Candidate]",
+        constraints: Dict[str, Constraint],
+    ) -> InstallationError:
+        assert e.causes, "Installation error reported with no cause"
+
+        # If one of the things we can't solve is "we need Python X.Y",
+        # that is what we report.
+        requires_python_causes = [
+            cause
+            for cause in e.causes
+            if isinstance(cause.requirement, RequiresPythonRequirement)
+            and not cause.requirement.is_satisfied_by(self._python_candidate)
+        ]
+        if requires_python_causes:
+            # The comprehension above makes sure all Requirement instances are
+            # RequiresPythonRequirement, so let's cast for convenience.
+            return self._report_requires_python_error(
+                cast("Sequence[ConflictCause]", requires_python_causes),
+            )
+
+        # Otherwise, we have a set of causes which can't all be satisfied
+        # at once.
+
+        # The simplest case is when we have *one* cause that can't be
+        # satisfied. We just report that case.
+        if len(e.causes) == 1:
+            req, parent = e.causes[0]
+            if req.name not in constraints:
+                return self._report_single_requirement_conflict(req, parent)
+
+        # OK, we now have a list of requirements that can't all be
+        # satisfied at once.
+
+        # A couple of formatting helpers
+        def text_join(parts: List[str]) -> str:
+            if len(parts) == 1:
+                return parts[0]
+
+            return ", ".join(parts[:-1]) + " and " + parts[-1]
+
+        def describe_trigger(parent: Candidate) -> str:
+            ireq = parent.get_install_requirement()
+            if not ireq or not ireq.comes_from:
+                return f"{parent.name}=={parent.version}"
+            if isinstance(ireq.comes_from, InstallRequirement):
+                return str(ireq.comes_from.name)
+            return str(ireq.comes_from)
+
+        triggers = set()
+        for req, parent in e.causes:
+            if parent is None:
+                # This is a root requirement, so we can report it directly
+                trigger = req.format_for_error()
+            else:
+                trigger = describe_trigger(parent)
+            triggers.add(trigger)
+
+        if triggers:
+            info = text_join(sorted(triggers))
+        else:
+            info = "the requested packages"
+
+        msg = (
+            "Cannot install {} because these package versions "
+            "have conflicting dependencies.".format(info)
+        )
+        logger.critical(msg)
+        msg = "\nThe conflict is caused by:"
+
+        relevant_constraints = set()
+        for req, parent in e.causes:
+            if req.name in constraints:
+                relevant_constraints.add(req.name)
+            msg = msg + "\n    "
+            if parent:
+                msg = msg + f"{parent.name} {parent.version} depends on "
+            else:
+                msg = msg + "The user requested "
+            msg = msg + req.format_for_error()
+        for key in relevant_constraints:
+            spec = constraints[key].specifier
+            msg += f"\n    The user requested (constraint) {key}{spec}"
+
+        msg = (
+            msg
+            + "\n\n"
+            + "To fix this you could try to:\n"
+            + "1. loosen the range of package versions you've specified\n"
+            + "2. remove package versions to allow pip attempt to solve "
+            + "the dependency conflict\n"
+        )
+
+        logger.info(msg)
+
+        return DistributionNotFound(
+            "ResolutionImpossible: for help visit "
+            "https://pip.pypa.io/en/latest/topics/dependency-resolution/"
+            "#dealing-with-dependency-conflicts"
+        )
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py
new file mode 100644
index 0000000..8663097
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py
@@ -0,0 +1,155 @@
+"""Utilities to lazily create and visit candidates found.
+
+Creating and visiting a candidate is a *very* costly operation. It involves
+fetching, extracting, potentially building modules from source, and verifying
+distribution metadata. It is therefore crucial for performance to keep
+everything here lazy all the way down, so we only touch candidates that we
+absolutely need, and not "download the world" when we only need one version of
+something.
+"""
+
+import functools
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, Set, Tuple
+
+from pip._vendor.packaging.version import _BaseVersion
+
+from .base import Candidate
+
+IndexCandidateInfo = Tuple[_BaseVersion, Callable[[], Optional[Candidate]]]
+
+if TYPE_CHECKING:
+    SequenceCandidate = Sequence[Candidate]
+else:
+    # For compatibility: Python before 3.9 does not support using [] on the
+    # Sequence class.
+    #
+    # >>> from collections.abc import Sequence
+    # >>> Sequence[str]
+    # Traceback (most recent call last):
+    #   File "<stdin>", line 1, in <module>
+    # TypeError: 'ABCMeta' object is not subscriptable
+    #
+    # TODO: Remove this block after dropping Python 3.8 support.
+    SequenceCandidate = Sequence
+
+
+def _iter_built(infos: Iterator[IndexCandidateInfo]) -> Iterator[Candidate]:
+    """Iterator for ``FoundCandidates``.
+
+    This iterator is used when the package is not already installed. Candidates
+    from the index are yielded in their normal ordering, deduplicated by
+    version.
+    """
+    versions_found: Set[_BaseVersion] = set()
+    for version, func in infos:
+        if version in versions_found:
+            continue
+        candidate = func()
+        if candidate is None:
+            continue
+        yield candidate
+        versions_found.add(version)
+
+
+def _iter_built_with_prepended(
+    installed: Candidate, infos: Iterator[IndexCandidateInfo]
+) -> Iterator[Candidate]:
+    """Iterator for ``FoundCandidates``.
+
+    This iterator is used when the resolver prefers the already-installed
+    candidate and does NOT want to upgrade. The installed candidate is
+    therefore always yielded first, and candidates from the index come later
+    in their normal ordering, except that the index copy of the installed
+    version is skipped.
+    """
+    yield installed
+    versions_found: Set[_BaseVersion] = {installed.version}
+    for version, func in infos:
+        if version in versions_found:
+            continue
+        candidate = func()
+        if candidate is None:
+            continue
+        yield candidate
+        versions_found.add(version)
+
+
+def _iter_built_with_inserted(
+    installed: Candidate, infos: Iterator[IndexCandidateInfo]
+) -> Iterator[Candidate]:
+    """Iterator for ``FoundCandidates``.
+
+    This iterator is used when the resolver prefers to upgrade an
+    already-installed package. Candidates from the index are returned in
+    their normal ordering, with the installed candidate inserted at its
+    place in that ordering.
+
+    The implementation iterates through and yields index candidates, inserting
+    the installed candidate exactly once before we start yielding older or
+    equivalent candidates, or after all other candidates if they are all newer.
+    """
+    versions_found: Set[_BaseVersion] = set()
+    for version, func in infos:
+        if version in versions_found:
+            continue
+        # If the installed candidate is better, yield it first.
+        if installed.version >= version:
+            yield installed
+            versions_found.add(installed.version)
+        candidate = func()
+        if candidate is None:
+            continue
+        yield candidate
+        versions_found.add(version)
+
+    # If the installed candidate is older than all other candidates.
+    if installed.version not in versions_found:
+        yield installed
+
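+# Hypothetical ordering sketch for the three iterators above, assuming the
+# index lists versions 3.0, 2.0 and 1.0 (newest first) and version 2.1 is
+# currently installed:
+#   _iter_built:                 3.0, 2.0, 1.0
+#   _iter_built_with_prepended:  2.1 (installed), 3.0, 2.0, 1.0
+#   _iter_built_with_inserted:   3.0, 2.1 (installed), 2.0, 1.0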
+
+class FoundCandidates(SequenceCandidate):
+    """A lazy sequence to provide candidates to the resolver.
+
+    The intended usage is to return this from `find_matches()` so the resolver
+    can iterate through the sequence multiple times, but only access the index
+    page when remote packages are actually needed. This improves performance
+    when suitable candidates are already installed on disk.
+    """
+
+    def __init__(
+        self,
+        get_infos: Callable[[], Iterator[IndexCandidateInfo]],
+        installed: Optional[Candidate],
+        prefers_installed: bool,
+        incompatible_ids: Set[int],
+    ) -> None:
+        self._get_infos = get_infos
+        self._installed = installed
+        self._prefers_installed = prefers_installed
+        self._incompatible_ids = incompatible_ids
+
+    def __getitem__(self, index: Any) -> Any:
+        # Implemented to satisfy the ABC check. This is not needed by the
+        # resolver, and should not be used by the provider either (for
+        # performance reasons).
+        raise NotImplementedError("don't do this")
+
+    def __iter__(self) -> Iterator[Candidate]:
+        infos = self._get_infos()
+        if not self._installed:
+            iterator = _iter_built(infos)
+        elif self._prefers_installed:
+            iterator = _iter_built_with_prepended(self._installed, infos)
+        else:
+            iterator = _iter_built_with_inserted(self._installed, infos)
+        return (c for c in iterator if id(c) not in self._incompatible_ids)
+
+    def __len__(self) -> int:
+        # Implemented to satisfy the ABC check. This is not needed by the
+        # resolver, and should not be used by the provider either (for
+        # performance reasons).
+        raise NotImplementedError("don't do this")
+
+    @functools.lru_cache(maxsize=1)
+    def __bool__(self) -> bool:
+        if self._prefers_installed and self._installed:
+            return True
+        return any(self)
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/provider.py b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/provider.py
new file mode 100644
index 0000000..6300dfc
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/provider.py
@@ -0,0 +1,248 @@
+import collections
+import math
+from typing import (
+    TYPE_CHECKING,
+    Dict,
+    Iterable,
+    Iterator,
+    Mapping,
+    Sequence,
+    TypeVar,
+    Union,
+)
+
+from pip._vendor.resolvelib.providers import AbstractProvider
+
+from .base import Candidate, Constraint, Requirement
+from .candidates import REQUIRES_PYTHON_IDENTIFIER
+from .factory import Factory
+
+if TYPE_CHECKING:
+    from pip._vendor.resolvelib.providers import Preference
+    from pip._vendor.resolvelib.resolvers import RequirementInformation
+
+    PreferenceInformation = RequirementInformation[Requirement, Candidate]
+
+    _ProviderBase = AbstractProvider[Requirement, Candidate, str]
+else:
+    _ProviderBase = AbstractProvider
+
+# Notes on the relationship between the provider, the factory, and the
+# candidate and requirement classes.
+#
+# The provider is a direct implementation of the resolvelib class. Its role
+# is to deliver the API that resolvelib expects.
+#
+# Rather than work with completely abstract "requirement" and "candidate"
+# concepts as resolvelib does, pip has concrete classes implementing these two
+# ideas. The API of the Requirement and Candidate objects is defined in the
+# base classes, but essentially maps fairly directly to the equivalent provider
+# methods. In particular, `find_matches` and `is_satisfied_by` are
+# requirement methods, and `get_dependencies` is a candidate method.
+#
+# The factory is the interface to pip's internal mechanisms. It is stateless,
+# and is created by the resolver and held as a property of the provider. It is
+# responsible for creating Requirement and Candidate objects, and provides
+# services to those objects (access to pip's finder and preparer).
+
+
+D = TypeVar("D")
+V = TypeVar("V")
+
+
+def _get_with_identifier(
+    mapping: Mapping[str, V],
+    identifier: str,
+    default: D,
+) -> Union[D, V]:
+    """Get item from a package name lookup mapping with a resolver identifier.
+
+    This extra logic is needed when the target mapping is keyed by package
+    name, which cannot be directly looked up with an identifier (which may
+    contain requested extras). Additional logic is added to also look up a value
+    by "cleaning up" the extras from the identifier.
+    """
+    if identifier in mapping:
+        return mapping[identifier]
+    # HACK: Theoretically we should check whether this identifier is a valid
+    # "NAME[EXTRAS]" format, and parse out the name part with packaging or
+    # some regular expression. But since pip's resolver only spits out three
+    # kinds of identifiers: normalized PEP 503 names, normalized names plus
+    # extras, and Requires-Python, we can cheat a bit here.
+    name, open_bracket, _ = identifier.partition("[")
+    if open_bracket and name in mapping:
+        return mapping[name]
+    return default
+
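+# Doctest-style sketch of the lookup (hypothetical mapping):
+#   >>> _get_with_identifier({"pkg": 1}, "pkg[extra]", default=0)
+#   1
+#   >>> _get_with_identifier({"pkg": 1}, "other[extra]", default=0)
+#   0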
+
+class PipProvider(_ProviderBase):
+    """Pip's provider implementation for resolvelib.
+
+    :params constraints: A mapping of constraints specified by the user. Keys
+        are canonicalized project names.
+    :params ignore_dependencies: Whether the user specified ``--no-deps``.
+    :params upgrade_strategy: The user-specified upgrade strategy.
+    :params user_requested: A set of canonicalized package names that the user
+        supplied for pip to install/upgrade.
+    """
+
+    def __init__(
+        self,
+        factory: Factory,
+        constraints: Dict[str, Constraint],
+        ignore_dependencies: bool,
+        upgrade_strategy: str,
+        user_requested: Dict[str, int],
+    ) -> None:
+        self._factory = factory
+        self._constraints = constraints
+        self._ignore_dependencies = ignore_dependencies
+        self._upgrade_strategy = upgrade_strategy
+        self._user_requested = user_requested
+        self._known_depths: Dict[str, float] = collections.defaultdict(lambda: math.inf)
+
+    def identify(self, requirement_or_candidate: Union[Requirement, Candidate]) -> str:
+        return requirement_or_candidate.name
+
+    def get_preference(  # type: ignore
+        self,
+        identifier: str,
+        resolutions: Mapping[str, Candidate],
+        candidates: Mapping[str, Iterator[Candidate]],
+        information: Mapping[str, Iterable["PreferenceInformation"]],
+        backtrack_causes: Sequence["PreferenceInformation"],
+    ) -> "Preference":
+        """Produce a sort key for given requirement based on preference.
+
+        The lower the return value is, the more preferred this group of
+        arguments is.
+
+        Currently pip considers the following in order:
+
+        * Prefer if any of the known requirements is "direct", e.g. points to an
+          explicit URL.
+        * If equal, prefer if any requirement is "pinned", i.e. contains
+          operator ``===`` or ``==``.
+        * If equal, calculate an approximate "depth" and resolve requirements
+          closer to the user-specified requirements first.
+        * Order user-specified requirements by the order they are specified.
+        * If equal, prefers "non-free" requirements, i.e. contains at least one
+          operator, such as ``>=`` or ``<``.
+        * If equal, order alphabetically for consistency (helps debuggability).
+        """
+        lookups = (r.get_candidate_lookup() for r, _ in information[identifier])
+        explicit_candidates, ireqs = zip(*lookups)
+        operators = [
+            specifier.operator
+            for specifier_set in (ireq.specifier for ireq in ireqs if ireq)
+            for specifier in specifier_set
+        ]
+
+        # `zip` packs the lookups into tuples, so test the elements; the
+        # tuple itself is never None.
+        direct = any(cand is not None for cand in explicit_candidates)
+        pinned = any(op[:2] == "==" for op in operators)
+        unfree = bool(operators)
+
+        try:
+            requested_order: Union[int, float] = self._user_requested[identifier]
+        except KeyError:
+            requested_order = math.inf
+            parent_depths = (
+                self._known_depths[parent.name] if parent is not None else 0.0
+                for _, parent in information[identifier]
+            )
+            inferred_depth = min(d for d in parent_depths) + 1.0
+        else:
+            inferred_depth = 1.0
+        self._known_depths[identifier] = inferred_depth
+
+        # Requires-Python has only one candidate and the check is basically
+        # free, so we always do it first to avoid needless work if it fails.
+        requires_python = identifier == REQUIRES_PYTHON_IDENTIFIER
+
+        # HACK: Setuptools has a very long and solid backward compatibility
+        # track record, and extremely few projects would request a narrow,
+        # non-recent version range of it since that would break a lot of
+        # things. (Most projects specify it only to request an installer
+        # feature, which does not work, but that's another topic.)
+        # Intentionally delaying Setuptools helps reduce the branches the
+        # resolver has to check.
+        # This serves as a temporary fix for issues like "apache-airflow[all]"
+        # while we work on "proper" branch pruning techniques.
+        delay_this = identifier == "setuptools"
+
+        # Prefer the causes of backtracking on the assumption that the problem
+        # resolving the dependency tree is related to the failures that caused
+        # the backtracking
+        backtrack_cause = self.is_backtrack_cause(identifier, backtrack_causes)
+
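+        # Illustrative comparison: a user-requested "pkg==1.0" produces a key
+        # whose `not pinned` element is False and whose requested_order is
+        # finite, so it sorts ahead of an unpinned transitive "dep>=2" (with
+        # `not pinned` True and requested_order math.inf), since tuples
+        # compare element-wise.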
+        return (
+            not requires_python,
+            delay_this,
+            not direct,
+            not pinned,
+            not backtrack_cause,
+            inferred_depth,
+            requested_order,
+            not unfree,
+            identifier,
+        )
+
+    def find_matches(
+        self,
+        identifier: str,
+        requirements: Mapping[str, Iterator[Requirement]],
+        incompatibilities: Mapping[str, Iterator[Candidate]],
+    ) -> Iterable[Candidate]:
+        def _eligible_for_upgrade(identifier: str) -> bool:
+            """Are upgrades allowed for this project?
+
+            This checks the upgrade strategy, and whether the project was one
+            that the user specified in the command line, in order to decide
+            whether we should upgrade if there's a newer version available.
+
+            (Note that we don't need access to the `--upgrade` flag, because
+            an upgrade strategy of "to-satisfy-only" means that `--upgrade`
+            was not specified).
+            """
+            if self._upgrade_strategy == "eager":
+                return True
+            elif self._upgrade_strategy == "only-if-needed":
+                user_order = _get_with_identifier(
+                    self._user_requested,
+                    identifier,
+                    default=None,
+                )
+                return user_order is not None
+            return False
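+        # e.g. under the default "only-if-needed" strategy, `pip install
+        # --upgrade pkg` makes "pkg" eligible for upgrade (it was user
+        # requested), while a transitive dependency keeps its installed
+        # version as long as it still satisfies the graph; under "eager",
+        # everything is eligible.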
+
+        constraint = _get_with_identifier(
+            self._constraints,
+            identifier,
+            default=Constraint.empty(),
+        )
+        return self._factory.find_candidates(
+            identifier=identifier,
+            requirements=requirements,
+            constraint=constraint,
+            prefers_installed=(not _eligible_for_upgrade(identifier)),
+            incompatibilities=incompatibilities,
+        )
+
+    def is_satisfied_by(self, requirement: Requirement, candidate: Candidate) -> bool:
+        return requirement.is_satisfied_by(candidate)
+
+    def get_dependencies(self, candidate: Candidate) -> Sequence[Requirement]:
+        with_requires = not self._ignore_dependencies
+        return [r for r in candidate.iter_dependencies(with_requires) if r is not None]
+
+    @staticmethod
+    def is_backtrack_cause(
+        identifier: str, backtrack_causes: Sequence["PreferenceInformation"]
+    ) -> bool:
+        for backtrack_cause in backtrack_causes:
+            if identifier == backtrack_cause.requirement.name:
+                return True
+            if backtrack_cause.parent and identifier == backtrack_cause.parent.name:
+                return True
+        return False
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/reporter.py b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/reporter.py
new file mode 100644
index 0000000..6ced532
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/reporter.py
@@ -0,0 +1,68 @@
+from collections import defaultdict
+from logging import getLogger
+from typing import Any, DefaultDict
+
+from pip._vendor.resolvelib.reporters import BaseReporter
+
+from .base import Candidate, Requirement
+
+logger = getLogger(__name__)
+
+
+class PipReporter(BaseReporter):
+    def __init__(self) -> None:
+        self.backtracks_by_package: DefaultDict[str, int] = defaultdict(int)
+
+        self._messages_at_backtrack = {
+            1: (
+                "pip is looking at multiple versions of {package_name} to "
+                "determine which version is compatible with other "
+                "requirements. This could take a while."
+            ),
+            8: (
+                "pip is looking at multiple versions of {package_name} to "
+                "determine which version is compatible with other "
+                "requirements. This could take a while."
+            ),
+            13: (
+                "This is taking longer than usual. You might need to provide "
+                "the dependency resolver with stricter constraints to reduce "
+                "runtime. See https://pip.pypa.io/warnings/backtracking for "
+                "guidance. If you want to abort this run, press Ctrl + C."
+            ),
+        }
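+        # Keys are per-package backtrack counts: the gentler message fires on
+        # a package's 1st and 8th backtrack, the sterner one on its 13th.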
+
+    def backtracking(self, candidate: Candidate) -> None:
+        self.backtracks_by_package[candidate.name] += 1
+
+        count = self.backtracks_by_package[candidate.name]
+        if count not in self._messages_at_backtrack:
+            return
+
+        message = self._messages_at_backtrack[count]
+        logger.info("INFO: %s", message.format(package_name=candidate.name))
+
+
+class PipDebuggingReporter(BaseReporter):
+    """A reporter that does an info log for every event it sees."""
+
+    def starting(self) -> None:
+        logger.info("Reporter.starting()")
+
+    def starting_round(self, index: int) -> None:
+        logger.info("Reporter.starting_round(%r)", index)
+
+    def ending_round(self, index: int, state: Any) -> None:
+        logger.info("Reporter.ending_round(%r, state)", index)
+
+    def ending(self, state: Any) -> None:
+        logger.info("Reporter.ending(%r)", state)
+
+    def adding_requirement(self, requirement: Requirement, parent: Candidate) -> None:
+        logger.info("Reporter.adding_requirement(%r, %r)", requirement, parent)
+
+    def backtracking(self, candidate: Candidate) -> None:
+        logger.info("Reporter.backtracking(%r)", candidate)
+
+    def pinning(self, candidate: Candidate) -> None:
+        logger.info("Reporter.pinning(%r)", candidate)
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/requirements.py b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/requirements.py
new file mode 100644
index 0000000..f561f1f
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/requirements.py
@@ -0,0 +1,166 @@
+from pip._vendor.packaging.specifiers import SpecifierSet
+from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
+
+from pip._internal.req.req_install import InstallRequirement
+
+from .base import Candidate, CandidateLookup, Requirement, format_name
+
+
+class ExplicitRequirement(Requirement):
+    def __init__(self, candidate: Candidate) -> None:
+        self.candidate = candidate
+
+    def __str__(self) -> str:
+        return str(self.candidate)
+
+    def __repr__(self) -> str:
+        return "{class_name}({candidate!r})".format(
+            class_name=self.__class__.__name__,
+            candidate=self.candidate,
+        )
+
+    @property
+    def project_name(self) -> NormalizedName:
+        # No need to canonicalize - the candidate did this
+        return self.candidate.project_name
+
+    @property
+    def name(self) -> str:
+        # No need to canonicalize - the candidate did this
+        return self.candidate.name
+
+    def format_for_error(self) -> str:
+        return self.candidate.format_for_error()
+
+    def get_candidate_lookup(self) -> CandidateLookup:
+        return self.candidate, None
+
+    def is_satisfied_by(self, candidate: Candidate) -> bool:
+        return candidate == self.candidate
+
+
+class SpecifierRequirement(Requirement):
+    def __init__(self, ireq: InstallRequirement) -> None:
+        assert ireq.link is None, "This is a link, not a specifier"
+        self._ireq = ireq
+        self._extras = frozenset(ireq.extras)
+
+    def __str__(self) -> str:
+        return str(self._ireq.req)
+
+    def __repr__(self) -> str:
+        return "{class_name}({requirement!r})".format(
+            class_name=self.__class__.__name__,
+            requirement=str(self._ireq.req),
+        )
+
+    @property
+    def project_name(self) -> NormalizedName:
+        assert self._ireq.req, "Specifier-backed ireq is always PEP 508"
+        return canonicalize_name(self._ireq.req.name)
+
+    @property
+    def name(self) -> str:
+        return format_name(self.project_name, self._extras)
+
+    def format_for_error(self) -> str:
+        # Convert comma-separated specifiers into "A, B, ..., F and G"
+        # This makes the specifier a bit more "human readable", without
+        # risking a change in meaning. (Hopefully! Not all edge cases have
+        # been checked)
+        parts = [s.strip() for s in str(self).split(",")]
+        if len(parts) == 0:
+            return ""
+        elif len(parts) == 1:
+            return parts[0]
+
+        return ", ".join(parts[:-1]) + " and " + parts[-1]
+
+    def get_candidate_lookup(self) -> CandidateLookup:
+        return None, self._ireq
+
+    def is_satisfied_by(self, candidate: Candidate) -> bool:
+        assert candidate.name == self.name, (
+            f"Internal issue: Candidate is not for this requirement "
+            f"{candidate.name} vs {self.name}"
+        )
+        # We can safely always allow prereleases here since PackageFinder
+        # already implements the prerelease logic, and would have filtered out
+        # prerelease candidates if the user does not expect them.
+        assert self._ireq.req, "Specifier-backed ireq is always PEP 508"
+        spec = self._ireq.req.specifier
+        return spec.contains(candidate.version, prereleases=True)
+
+
+class RequiresPythonRequirement(Requirement):
+    """A requirement representing Requires-Python metadata."""
+
+    def __init__(self, specifier: SpecifierSet, match: Candidate) -> None:
+        self.specifier = specifier
+        self._candidate = match
+
+    def __str__(self) -> str:
+        return f"Python {self.specifier}"
+
+    def __repr__(self) -> str:
+        return "{class_name}({specifier!r})".format(
+            class_name=self.__class__.__name__,
+            specifier=str(self.specifier),
+        )
+
+    @property
+    def project_name(self) -> NormalizedName:
+        return self._candidate.project_name
+
+    @property
+    def name(self) -> str:
+        return self._candidate.name
+
+    def format_for_error(self) -> str:
+        return str(self)
+
+    def get_candidate_lookup(self) -> CandidateLookup:
+        if self.specifier.contains(self._candidate.version, prereleases=True):
+            return self._candidate, None
+        return None, None
+
+    def is_satisfied_by(self, candidate: Candidate) -> bool:
+        assert candidate.name == self._candidate.name, "Not Python candidate"
+        # We can safely always allow prereleases here since PackageFinder
+        # already implements the prerelease logic, and would have filtered out
+        # prerelease candidates if the user does not expect them.
+        return self.specifier.contains(candidate.version, prereleases=True)
+
+
+class UnsatisfiableRequirement(Requirement):
+    """A requirement that cannot be satisfied."""
+
+    def __init__(self, name: NormalizedName) -> None:
+        self._name = name
+
+    def __str__(self) -> str:
+        return f"{self._name} (unavailable)"
+
+    def __repr__(self) -> str:
+        return "{class_name}({name!r})".format(
+            class_name=self.__class__.__name__,
+            name=str(self._name),
+        )
+
+    @property
+    def project_name(self) -> NormalizedName:
+        return self._name
+
+    @property
+    def name(self) -> str:
+        return self._name
+
+    def format_for_error(self) -> str:
+        return str(self)
+
+    def get_candidate_lookup(self) -> CandidateLookup:
+        return None, None
+
+    def is_satisfied_by(self, candidate: Candidate) -> bool:
+        return False
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/resolver.py b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/resolver.py
new file mode 100644
index 0000000..a605d6c
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/resolution/resolvelib/resolver.py
@@ -0,0 +1,296 @@
+import functools
+import logging
+import os
+from typing import TYPE_CHECKING, Dict, List, Optional, Set, Tuple, cast
+
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.resolvelib import BaseReporter, ResolutionImpossible
+from pip._vendor.resolvelib import Resolver as RLResolver
+from pip._vendor.resolvelib.structs import DirectedGraph
+
+from pip._internal.cache import WheelCache
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.operations.prepare import RequirementPreparer
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.req.req_set import RequirementSet
+from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider
+from pip._internal.resolution.resolvelib.provider import PipProvider
+from pip._internal.resolution.resolvelib.reporter import (
+    PipDebuggingReporter,
+    PipReporter,
+)
+
+from .base import Candidate, Requirement
+from .factory import Factory
+
+if TYPE_CHECKING:
+    from pip._vendor.resolvelib.resolvers import Result as RLResult
+
+    Result = RLResult[Requirement, Candidate, str]
+
+
+logger = logging.getLogger(__name__)
+
+
+class Resolver(BaseResolver):
+    _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"}
+
+    def __init__(
+        self,
+        preparer: RequirementPreparer,
+        finder: PackageFinder,
+        wheel_cache: Optional[WheelCache],
+        make_install_req: InstallRequirementProvider,
+        use_user_site: bool,
+        ignore_dependencies: bool,
+        ignore_installed: bool,
+        ignore_requires_python: bool,
+        force_reinstall: bool,
+        upgrade_strategy: str,
+        py_version_info: Optional[Tuple[int, ...]] = None,
+    ):
+        super().__init__()
+        assert upgrade_strategy in self._allowed_strategies
+
+        self.factory = Factory(
+            finder=finder,
+            preparer=preparer,
+            make_install_req=make_install_req,
+            wheel_cache=wheel_cache,
+            use_user_site=use_user_site,
+            force_reinstall=force_reinstall,
+            ignore_installed=ignore_installed,
+            ignore_requires_python=ignore_requires_python,
+            py_version_info=py_version_info,
+        )
+        self.ignore_dependencies = ignore_dependencies
+        self.upgrade_strategy = upgrade_strategy
+        self._result: Optional[Result] = None
+
+    def resolve(
+        self, root_reqs: List[InstallRequirement], check_supported_wheels: bool
+    ) -> RequirementSet:
+        collected = self.factory.collect_root_requirements(root_reqs)
+        provider = PipProvider(
+            factory=self.factory,
+            constraints=collected.constraints,
+            ignore_dependencies=self.ignore_dependencies,
+            upgrade_strategy=self.upgrade_strategy,
+            user_requested=collected.user_requested,
+        )
+        if "PIP_RESOLVER_DEBUG" in os.environ:
+            reporter: BaseReporter = PipDebuggingReporter()
+        else:
+            reporter = PipReporter()
+        resolver: RLResolver[Requirement, Candidate, str] = RLResolver(
+            provider,
+            reporter,
+        )
+
+        try:
+            try_to_avoid_resolution_too_deep = 2000000
+            result = self._result = resolver.resolve(
+                collected.requirements, max_rounds=try_to_avoid_resolution_too_deep
+            )
+
+        except ResolutionImpossible as e:
+            error = self.factory.get_installation_error(
+                cast("ResolutionImpossible[Requirement, Candidate]", e),
+                collected.constraints,
+            )
+            raise error from e
+
+        req_set = RequirementSet(check_supported_wheels=check_supported_wheels)
+        for candidate in result.mapping.values():
+            ireq = candidate.get_install_requirement()
+            if ireq is None:
+                continue
+
+            # Check if there is already an installation under the same name,
+            # and set a flag for later stages to uninstall it, if needed.
+            installed_dist = self.factory.get_dist_to_uninstall(candidate)
+            if installed_dist is None:
+                # There is no existing installation -- nothing to uninstall.
+                ireq.should_reinstall = False
+            elif self.factory.force_reinstall:
+                # The --force-reinstall flag is set -- reinstall.
+                ireq.should_reinstall = True
+            elif installed_dist.version != candidate.version:
+                # The installation is different in version -- reinstall.
+                ireq.should_reinstall = True
+            elif candidate.is_editable or installed_dist.editable:
+                # The incoming distribution is editable, or different in
+                # editable-ness to installation -- reinstall.
+                ireq.should_reinstall = True
+            elif candidate.source_link and candidate.source_link.is_file:
+                # The incoming distribution is under file://
+                if candidate.source_link.is_wheel:
+                    # is a local wheel -- do nothing.
+                    logger.info(
+                        "%s is already installed with the same version as the "
+                        "provided wheel. Use --force-reinstall to force an "
+                        "installation of the wheel.",
+                        ireq.name,
+                    )
+                    continue
+
+                # is a local sdist or path -- reinstall
+                ireq.should_reinstall = True
+            else:
+                continue
+
+            link = candidate.source_link
+            if link and link.is_yanked:
+                # The yanked reason can contain non-ASCII characters.
+                msg = (
+                    "The candidate selected for download or install is a "
+                    "yanked version: {name!r} candidate (version {version} "
+                    "at {link})\nReason for being yanked: {reason}"
+                ).format(
+                    name=candidate.name,
+                    version=candidate.version,
+                    link=link,
+                    reason=link.yanked_reason or "<none given>",
+                )
+                logger.warning(msg)
+
+            req_set.add_named_requirement(ireq)
+
+        reqs = req_set.all_requirements
+        self.factory.preparer.prepare_linked_requirements_more(reqs)
+        return req_set
+
+    def get_installation_order(
+        self, req_set: RequirementSet
+    ) -> List[InstallRequirement]:
+        """Get order for installation of requirements in RequirementSet.
+
+        The returned list contains each requirement before any requirement
+        that depends on it. This helps ensure that the environment is kept
+        consistent as the requirements get installed one-by-one.
+
+        The current implementation creates a topological ordering of the
+        dependency graph, giving more weight to packages with fewer
+        or no dependencies, while breaking any cycles in the graph at
+        arbitrary points. We make no guarantees about where the cycle
+        would be broken, other than it *would* be broken.
+        """
+        assert self._result is not None, "must call resolve() first"
+
+        if not req_set.requirements:
+            # Nothing is left to install, so we do not need an order.
+            return []
+
+        graph = self._result.graph
+        weights = get_topological_weights(graph, set(req_set.requirements.keys()))
+
+        sorted_items = sorted(
+            req_set.requirements.items(),
+            key=functools.partial(_req_set_item_sorter, weights=weights),
+            reverse=True,
+        )
+        return [ireq for _, ireq in sorted_items]
+
+
+def get_topological_weights(
+    graph: "DirectedGraph[Optional[str]]", requirement_keys: Set[str]
+) -> Dict[Optional[str], int]:
+    """Assign weights to each node based on how "deep" they are.
+
+    This implementation may change at any point in the future without prior
+    notice.
+
+    We first simplify the dependency graph by pruning any leaves and giving them
+    the highest weight: a package without any dependencies should be installed
+    first. This is done again and again in the same way, giving ever less weight
+    to the newly found leaves. The loop stops when no leaves are left: all
+    remaining packages have at least one dependency left in the graph.
+
+    Then we continue with the remaining graph, by taking the length of the
+    longest path from the root to each node, ignoring any paths that contain
+    a single node twice (i.e. cycles). This is done through a depth-first
+    search of the graph, while keeping track of the path to the node.
+
+    A cycle in the graph would result in a node being revisited while it is
+    also on its own path. In that case we take no action, which ensures we
+    don't get stuck in the cycle.
+
+    When assigning weight, the longer path (i.e. larger length) is preferred.
+
+    We are only interested in the weights of packages that are in the
+    requirement_keys.
+    """
+    path: Set[Optional[str]] = set()
+    weights: Dict[Optional[str], int] = {}
+
+    def visit(node: Optional[str]) -> None:
+        if node in path:
+            # We hit a cycle, so we'll break it here.
+            return
+
+        # Time to visit the children!
+        path.add(node)
+        for child in graph.iter_children(node):
+            visit(child)
+        path.remove(node)
+
+        if node not in requirement_keys:
+            return
+
+        last_known_parent_count = weights.get(node, 0)
+        weights[node] = max(last_known_parent_count, len(path))
+
+    # Simplify the graph, pruning leaves that have no dependencies.
+    # This is needed for large graphs (say over 200 packages) because the
+    # `visit` function is exponentially slower in that case, taking minutes.
+    # See https://github.com/pypa/pip/issues/10557
+    # We will loop until we explicitly break the loop.
+    while True:
+        leaves = set()
+        for key in graph:
+            if key is None:
+                continue
+            for _child in graph.iter_children(key):
+                # This means we have at least one child
+                break
+            else:
+                # No child.
+                leaves.add(key)
+        if not leaves:
+            # We are done simplifying.
+            break
+        # Calculate the weight for the leaves.
+        weight = len(graph) - 1
+        for leaf in leaves:
+            if leaf not in requirement_keys:
+                continue
+            weights[leaf] = weight
+        # Remove the leaves from the graph, making it simpler.
+        for leaf in leaves:
+            graph.remove(leaf)
+
+    # Visit the remaining graph.
+    # `None` is guaranteed to be the root node by resolvelib.
+    visit(None)
+
+    # Sanity check: all requirement keys should be in the weights,
+    # and no other keys should be in the weights.
+    difference = set(weights.keys()).difference(requirement_keys)
+    assert not difference, difference
+
+    return weights
+
+
+def _req_set_item_sorter(
+    item: Tuple[str, InstallRequirement],
+    weights: Dict[Optional[str], int],
+) -> Tuple[int, str]:
+    """Key function used to sort install requirements for installation.
+
+    Based on the "weight" mapping calculated in ``get_installation_order()``.
+    The canonical package name is returned as the second member as a tie-
+    breaker to ensure the result is predictable, which is useful in tests.
+    """
+    name = canonicalize_name(item[0])
+    return weights[name], name
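
The weighting scheme documented in get_topological_weights() can be
illustrated with a self-contained toy graph -- plain dicts here rather than
resolvelib's DirectedGraph, and the package names are hypothetical:

    # Toy version of the idea: a node's weight is the depth of the longest
    # dependency chain leading to it, and sorting by descending weight puts
    # dependencies before their dependents.
    deps = {              # node -> its children (dependencies)
        None: ["app"],    # None is the root node, as in resolvelib
        "app": ["lib", "util"],
        "lib": ["util"],
        "util": [],
    }
    weights = {}

    def visit(node, path=frozenset()):
        if node in path:  # a cycle: break it here, as the real code does
            return
        for child in deps[node]:
            visit(child, path | {node})
        if node is not None:
            weights[node] = max(weights.get(node, 0), len(path))

    visit(None)
    order = sorted(weights, key=lambda n: (weights[n], n), reverse=True)
    print(order)  # ['util', 'lib', 'app'] -- dependencies first
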
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/self_outdated_check.py b/venv/lib/python3.9/site-packages/pip/_internal/self_outdated_check.py
new file mode 100644
index 0000000..9e2149c
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/self_outdated_check.py
@@ -0,0 +1,239 @@
+import datetime
+import functools
+import hashlib
+import json
+import logging
+import optparse
+import os.path
+import sys
+from dataclasses import dataclass
+from typing import Any, Callable, Dict, Optional
+
+from pip._vendor.packaging.version import parse as parse_version
+from pip._vendor.rich.console import Group
+from pip._vendor.rich.markup import escape
+from pip._vendor.rich.text import Text
+
+from pip._internal.index.collector import LinkCollector
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import get_default_environment
+from pip._internal.metadata.base import DistributionVersion
+from pip._internal.models.selection_prefs import SelectionPreferences
+from pip._internal.network.session import PipSession
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.entrypoints import (
+    get_best_invocation_for_this_pip,
+    get_best_invocation_for_this_python,
+)
+from pip._internal.utils.filesystem import adjacent_tmp_file, check_path_owner, replace
+from pip._internal.utils.misc import ensure_dir
+
+_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ"
+
+
+logger = logging.getLogger(__name__)
+
+
+def _get_statefile_name(key: str) -> str:
+    key_bytes = key.encode()
+    name = hashlib.sha224(key_bytes).hexdigest()
+    return name
+
+
+class SelfCheckState:
+    def __init__(self, cache_dir: str) -> None:
+        self._state: Dict[str, Any] = {}
+        self._statefile_path: Optional[str] = None
+
+        # Try to load the existing state
+        if cache_dir:
+            self._statefile_path = os.path.join(
+                cache_dir, "selfcheck", _get_statefile_name(self.key)
+            )
+            try:
+                with open(self._statefile_path, encoding="utf-8") as statefile:
+                    self._state = json.load(statefile)
+            except (OSError, ValueError, KeyError):
+                # Explicitly suppressing exceptions, since we don't want to
+                # error out if the cache file is invalid.
+                pass
+
+    @property
+    def key(self) -> str:
+        return sys.prefix
+
+    def get(self, current_time: datetime.datetime) -> Optional[str]:
+        """Check if we have a not-outdated version loaded already."""
+        if not self._state:
+            return None
+
+        if "last_check" not in self._state:
+            return None
+
+        if "pypi_version" not in self._state:
+            return None
+
+        seven_days_in_seconds = 7 * 24 * 60 * 60
+
+        # Determine if we need to refresh the state
+        last_check = datetime.datetime.strptime(self._state["last_check"], _DATE_FMT)
+        seconds_since_last_check = (current_time - last_check).total_seconds()
+        if seconds_since_last_check > seven_days_in_seconds:
+            return None
+
+        return self._state["pypi_version"]
+
+    def set(self, pypi_version: str, current_time: datetime.datetime) -> None:
+        # If we do not have a path to cache in, don't bother saving.
+        if not self._statefile_path:
+            return
+
+        # Check to make sure that we own the directory
+        if not check_path_owner(os.path.dirname(self._statefile_path)):
+            return
+
+        # Now that we've ensured the directory is owned by this user, we'll go
+        # ahead and make sure that all our directories are created.
+        ensure_dir(os.path.dirname(self._statefile_path))
+
+        state = {
+            # Include the key so it's easy to tell which pip wrote the
+            # file.
+            "key": self.key,
+            "last_check": current_time.strftime(_DATE_FMT),
+            "pypi_version": pypi_version,
+        }
+
+        text = json.dumps(state, sort_keys=True, separators=(",", ":"))
+
+        with adjacent_tmp_file(self._statefile_path) as f:
+            f.write(text.encode())
+
+        try:
+            # Since we have a prefix-specific state file, we can just
+            # overwrite whatever is there, no need to check.
+            replace(f.name, self._statefile_path)
+        except OSError:
+            # Best effort.
+            pass
+
+
+@dataclass
+class UpgradePrompt:
+    old: str
+    new: str
+
+    def __rich__(self) -> Group:
+        if WINDOWS:
+            pip_cmd = f"{get_best_invocation_for_this_python()} -m pip"
+        else:
+            pip_cmd = get_best_invocation_for_this_pip()
+
+        notice = "[bold][[reset][blue]notice[reset][bold]][reset]"
+        return Group(
+            Text(),
+            Text.from_markup(
+                f"{notice} A new release of pip available: "
+                f"[red]{self.old}[reset] -> [green]{self.new}[reset]"
+            ),
+            Text.from_markup(
+                f"{notice} To update, run: "
+                f"[green]{escape(pip_cmd)} install --upgrade pip"
+            ),
+        )
+
+
+def was_installed_by_pip(pkg: str) -> bool:
+    """Checks whether pkg was installed by pip
+
+    This is used to avoid displaying the upgrade message when pip was in
+    fact installed by a system package manager, such as dnf on Fedora.
+    """
+    dist = get_default_environment().get_distribution(pkg)
+    return dist is not None and "pip" == dist.installer
+
+
+def _get_current_remote_pip_version(
+    session: PipSession, options: optparse.Values
+) -> Optional[str]:
+    # Let's use PackageFinder to see what the latest pip version is.
+    link_collector = LinkCollector.create(
+        session,
+        options=options,
+        suppress_no_index=True,
+    )
+
+    # Pass allow_yanked=False so we don't suggest upgrading to a
+    # yanked version.
+    selection_prefs = SelectionPreferences(
+        allow_yanked=False,
+        allow_all_prereleases=False,  # Explicitly set to False
+    )
+
+    finder = PackageFinder.create(
+        link_collector=link_collector,
+        selection_prefs=selection_prefs,
+    )
+    best_candidate = finder.find_best_candidate("pip").best_candidate
+    if best_candidate is None:
+        return None
+
+    return str(best_candidate.version)
+
+
+def _self_version_check_logic(
+    *,
+    state: SelfCheckState,
+    current_time: datetime.datetime,
+    local_version: DistributionVersion,
+    get_remote_version: Callable[[], Optional[str]],
+) -> Optional[UpgradePrompt]:
+    remote_version_str = state.get(current_time)
+    if remote_version_str is None:
+        remote_version_str = get_remote_version()
+        if remote_version_str is None:
+            logger.debug("No remote pip version found")
+            return None
+        state.set(remote_version_str, current_time)
+
+    remote_version = parse_version(remote_version_str)
+    logger.debug("Remote version of pip: %s", remote_version)
+    logger.debug("Local version of pip:  %s", local_version)
+
+    pip_installed_by_pip = was_installed_by_pip("pip")
+    logger.debug("Was pip installed by pip? %s", pip_installed_by_pip)
+    if not pip_installed_by_pip:
+        return None  # Only suggest upgrade if pip is installed by pip.
+
+    local_version_is_older = (
+        local_version < remote_version
+        and local_version.base_version != remote_version.base_version
+    )
+    if local_version_is_older:
+        return UpgradePrompt(old=str(local_version), new=remote_version_str)
+
+    return None
+
+
+def pip_self_version_check(session: PipSession, options: optparse.Values) -> None:
+    """Check for an update for pip.
+
+    Limit the frequency of checks to once per week. State is stored either in
+    the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix
+    of the pip script path.
+    """
+    installed_dist = get_default_environment().get_distribution("pip")
+    if not installed_dist:
+        return
+
+    try:
+        upgrade_prompt = _self_version_check_logic(
+            state=SelfCheckState(cache_dir=options.cache_dir),
+            current_time=datetime.datetime.utcnow(),
+            local_version=installed_dist.version,
+            get_remote_version=functools.partial(
+                _get_current_remote_pip_version, session, options
+            ),
+        )
+        if upgrade_prompt is not None:
+            logger.warning("[present-rich] %s", upgrade_prompt)
+    except Exception:
+        logger.warning("There was an error checking the latest version of pip.")
+        logger.debug("See below for error", exc_info=True)
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/__init__.py b/venv/lib/python3.9/site-packages/pip/_internal/utils/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/__init__.py
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..c3dd83c
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/_log.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/_log.cpython-39.pyc
new file mode 100644
index 0000000..f6a89c3
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/_log.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-39.pyc
new file mode 100644
index 0000000..5dbdbf6
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/compat.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/compat.cpython-39.pyc
new file mode 100644
index 0000000..4c11ff6
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/compat.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-39.pyc
new file mode 100644
index 0000000..e2eb2fe
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-39.pyc
new file mode 100644
index 0000000..6763aec
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-39.pyc
new file mode 100644
index 0000000..0799aaa
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-39.pyc
new file mode 100644
index 0000000..c290096
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/distutils_args.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/distutils_args.cpython-39.pyc
new file mode 100644
index 0000000..98f3f43
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/distutils_args.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-39.pyc
new file mode 100644
index 0000000..7733cbb
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-39.pyc
new file mode 100644
index 0000000..c52d731
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-39.pyc
new file mode 100644
index 0000000..c4ea2a9
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-39.pyc
new file mode 100644
index 0000000..fe33ed8
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-39.pyc
new file mode 100644
index 0000000..db68018
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-39.pyc
new file mode 100644
index 0000000..164c38c
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-39.pyc
new file mode 100644
index 0000000..98253ba
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/inject_securetransport.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/inject_securetransport.cpython-39.pyc
new file mode 100644
index 0000000..03497c6
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/inject_securetransport.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/logging.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/logging.cpython-39.pyc
new file mode 100644
index 0000000..354b0db
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/logging.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/misc.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/misc.cpython-39.pyc
new file mode 100644
index 0000000..3811eee
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/misc.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/models.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/models.cpython-39.pyc
new file mode 100644
index 0000000..7949a33
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/models.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-39.pyc
new file mode 100644
index 0000000..22a4250
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-39.pyc
new file mode 100644
index 0000000..507109b
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-39.pyc
new file mode 100644
index 0000000..605af37
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-39.pyc
new file mode 100644
index 0000000..522e6a6
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-39.pyc
new file mode 100644
index 0000000..a85b552
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/urls.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/urls.cpython-39.pyc
new file mode 100644
index 0000000..f16d94f
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/urls.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-39.pyc
new file mode 100644
index 0000000..de653f2
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-39.pyc
new file mode 100644
index 0000000..99d0ca7
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/_log.py b/venv/lib/python3.9/site-packages/pip/_internal/utils/_log.py
new file mode 100644
index 0000000..92c4c6a
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/_log.py
@@ -0,0 +1,38 @@
+"""Customize logging
+
+Defines custom logger class for the `logger.verbose(...)` method.
+
+init_logging() must be called before importing any other modules that call logging.getLogger.
+"""
+
+import logging
+from typing import Any, cast
+
+# custom log level for `--verbose` output
+# between DEBUG and INFO
+VERBOSE = 15
+
+
+class VerboseLogger(logging.Logger):
+    """Custom Logger, defining a verbose log-level
+
+    VERBOSE is between INFO and DEBUG.
+    """
+
+    def verbose(self, msg: str, *args: Any, **kwargs: Any) -> None:
+        return self.log(VERBOSE, msg, *args, **kwargs)
+
+
+def getLogger(name: str) -> VerboseLogger:
+    """logging.getLogger, but ensures our VerboseLogger class is returned"""
+    return cast(VerboseLogger, logging.getLogger(name))
+
+
+def init_logging() -> None:
+    """Register our VerboseLogger and VERBOSE log level.
+
+    Should be called before any calls to getLogger(),
+    i.e. in pip._internal.__init__
+    """
+    logging.setLoggerClass(VerboseLogger)
+    logging.addLevelName(VERBOSE, "VERBOSE")
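
Registering a custom level with the stdlib logging module is enough to see
the effect of init_logging(). A short sketch:

    import logging

    VERBOSE = 15  # between DEBUG (10) and INFO (20)
    logging.addLevelName(VERBOSE, "VERBOSE")
    logging.basicConfig(level=VERBOSE, format="%(levelname)s %(message)s")

    logger = logging.getLogger("demo")
    logger.log(VERBOSE, "shown when the threshold is VERBOSE")
    logger.debug("filtered out: DEBUG is below the threshold")
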
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/appdirs.py b/venv/lib/python3.9/site-packages/pip/_internal/utils/appdirs.py
new file mode 100644
index 0000000..16933bf
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/appdirs.py
@@ -0,0 +1,52 @@
+"""
+This code wraps the vendored appdirs module so that the return values are
+compatible with the current pip code base.
+
+The intention is to rewrite current usages gradually, keeping the tests
+passing, and eventually drop this module once all usages have been changed.
+"""
+
+import os
+import sys
+from typing import List
+
+from pip._vendor import platformdirs as _appdirs
+
+
+def user_cache_dir(appname: str) -> str:
+    return _appdirs.user_cache_dir(appname, appauthor=False)
+
+
+def _macos_user_config_dir(appname: str, roaming: bool = True) -> str:
+    # Use ~/Application Support/pip, if the directory exists.
+    path = _appdirs.user_data_dir(appname, appauthor=False, roaming=roaming)
+    if os.path.isdir(path):
+        return path
+
+    # Use a Linux-like ~/.config/pip, by default.
+    linux_like_path = "~/.config/"
+    if appname:
+        linux_like_path = os.path.join(linux_like_path, appname)
+
+    return os.path.expanduser(linux_like_path)
+
+
+def user_config_dir(appname: str, roaming: bool = True) -> str:
+    if sys.platform == "darwin":
+        return _macos_user_config_dir(appname, roaming)
+
+    return _appdirs.user_config_dir(appname, appauthor=False, roaming=roaming)
+
+
+# for the discussion regarding site_config_dir locations
+# see <https://github.com/pypa/pip/issues/1733>
+def site_config_dirs(appname: str) -> List[str]:
+    if sys.platform == "darwin":
+        return [_appdirs.site_data_dir(appname, appauthor=False, multipath=True)]
+
+    dirval = _appdirs.site_config_dir(appname, appauthor=False, multipath=True)
+    if sys.platform == "win32":
+        return [dirval]
+
+    # Unix-y system. Look in /etc as well.
+    return dirval.split(os.pathsep) + ["/etc"]
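
The Unix branch of site_config_dirs() can be reproduced with the standalone
platformdirs package (pip vendors the same code as pip._vendor.platformdirs).
A sketch, assuming platformdirs is installed:

    import os
    import platformdirs

    # With multipath=True, Unix-y systems may return several directories
    # joined by os.pathsep; the wrapper splits them and appends /etc.
    dirval = platformdirs.site_config_dir("pip", appauthor=False, multipath=True)
    print(dirval.split(os.pathsep) + ["/etc"])
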
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/compat.py b/venv/lib/python3.9/site-packages/pip/_internal/utils/compat.py
new file mode 100644
index 0000000..3f4d300
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/compat.py
@@ -0,0 +1,63 @@
+"""Stuff that differs in different Python versions and platform
+distributions."""
+
+import logging
+import os
+import sys
+
+__all__ = ["get_path_uid", "stdlib_pkgs", "WINDOWS"]
+
+
+logger = logging.getLogger(__name__)
+
+
+def has_tls() -> bool:
+    try:
+        import _ssl  # noqa: F401  # ignore unused
+
+        return True
+    except ImportError:
+        pass
+
+    from pip._vendor.urllib3.util import IS_PYOPENSSL
+
+    return IS_PYOPENSSL
+
+
+def get_path_uid(path: str) -> int:
+    """
+    Return path's uid.
+
+    Does not follow symlinks:
+        https://github.com/pypa/pip/pull/935#discussion_r5307003
+
+    This function is placed in compat due to differences on AIX and
+    Jython, which should eventually go away.
+
+    :raises OSError: When path is a symlink or can't be read.
+    """
+    if hasattr(os, "O_NOFOLLOW"):
+        fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW)
+        file_uid = os.fstat(fd).st_uid
+        os.close(fd)
+    else:  # AIX and Jython
+        # WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW
+        if not os.path.islink(path):
+            # older versions of Jython don't have `os.fstat`
+            file_uid = os.stat(path).st_uid
+        else:
+            # raise OSError for parity with os.O_NOFOLLOW above
+            raise OSError(f"{path} is a symlink; Will not return uid for symlinks")
+    return file_uid
+
+
+# packages in the stdlib that may have installation metadata, but should not be
+# considered 'installed'. This could theoretically be determined based on
+# dist.location (py27: `sysconfig.get_paths()['stdlib']`,
+# py26: sysconfig.get_config_vars('LIBDEST')), but platform variation may
+# make that ineffective, so the set is hard-coded instead.
+stdlib_pkgs = {"python", "wsgiref", "argparse"}
+
+
+# windows detection, covers cpython and ironpython
+WINDOWS = sys.platform.startswith("win") or (sys.platform == "cli" and os.name == "nt")
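
The O_NOFOLLOW branch of get_path_uid() opens the path without following
symlinks and stats the resulting file descriptor. A POSIX-only sketch:

    import os

    fd = os.open(".", os.O_RDONLY | os.O_NOFOLLOW)  # raises OSError on a symlink
    try:
        print(os.fstat(fd).st_uid == os.getuid())  # True if we own the cwd
    finally:
        os.close(fd)
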
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/compatibility_tags.py b/venv/lib/python3.9/site-packages/pip/_internal/utils/compatibility_tags.py
new file mode 100644
index 0000000..b6ed9a7
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/compatibility_tags.py
@@ -0,0 +1,165 @@
+"""Generate and work with PEP 425 Compatibility Tags.
+"""
+
+import re
+from typing import List, Optional, Tuple
+
+from pip._vendor.packaging.tags import (
+    PythonVersion,
+    Tag,
+    compatible_tags,
+    cpython_tags,
+    generic_tags,
+    interpreter_name,
+    interpreter_version,
+    mac_platforms,
+)
+
+_osx_arch_pat = re.compile(r"(.+)_(\d+)_(\d+)_(.+)")
+
+
+def version_info_to_nodot(version_info: Tuple[int, ...]) -> str:
+    # Only use up to the first two numbers.
+    return "".join(map(str, version_info[:2]))
+
+
+def _mac_platforms(arch: str) -> List[str]:
+    match = _osx_arch_pat.match(arch)
+    if match:
+        name, major, minor, actual_arch = match.groups()
+        mac_version = (int(major), int(minor))
+        arches = [
+            # Since we have always only checked that the platform starts
+            # with "macosx", for backwards-compatibility we extract the
+            # actual prefix provided by the user in case they provided
+            # something like "macosxcustom_". It may be good to remove
+            # this as undocumented or deprecate it in the future.
+            "{}_{}".format(name, arch[len("macosx_") :])
+            for arch in mac_platforms(mac_version, actual_arch)
+        ]
+    else:
+        # arch pattern didn't match (?!)
+        arches = [arch]
+    return arches
+
+
+def _custom_manylinux_platforms(arch: str) -> List[str]:
+    arches = [arch]
+    arch_prefix, arch_sep, arch_suffix = arch.partition("_")
+    if arch_prefix == "manylinux2014":
+        # manylinux1/manylinux2010 wheels run on most manylinux2014 systems
+        # with the exception of wheels depending on ncurses. PEP 599 states
+        # manylinux1/manylinux2010 wheels should be considered
+        # manylinux2014 wheels:
+        # https://www.python.org/dev/peps/pep-0599/#backwards-compatibility-with-manylinux2010-wheels
+        if arch_suffix in {"i686", "x86_64"}:
+            arches.append("manylinux2010" + arch_sep + arch_suffix)
+            arches.append("manylinux1" + arch_sep + arch_suffix)
+    elif arch_prefix == "manylinux2010":
+        # manylinux1 wheels run on most manylinux2010 systems with the
+        # exception of wheels depending on ncurses. PEP 571 states
+        # manylinux1 wheels should be considered manylinux2010 wheels:
+        # https://www.python.org/dev/peps/pep-0571/#backwards-compatibility-with-manylinux1-wheels
+        arches.append("manylinux1" + arch_sep + arch_suffix)
+    return arches
+
+
+def _get_custom_platforms(arch: str) -> List[str]:
+    arch_prefix, arch_sep, arch_suffix = arch.partition("_")
+    if arch.startswith("macosx"):
+        arches = _mac_platforms(arch)
+    elif arch_prefix in ["manylinux2014", "manylinux2010"]:
+        arches = _custom_manylinux_platforms(arch)
+    else:
+        arches = [arch]
+    return arches
+
+
+def _expand_allowed_platforms(platforms: Optional[List[str]]) -> Optional[List[str]]:
+    if not platforms:
+        return None
+
+    seen = set()
+    result = []
+
+    for p in platforms:
+        if p in seen:
+            continue
+        additions = [c for c in _get_custom_platforms(p) if c not in seen]
+        seen.update(additions)
+        result.extend(additions)
+
+    return result
+
+
+def _get_python_version(version: str) -> PythonVersion:
+    if len(version) > 1:
+        return int(version[0]), int(version[1:])
+    else:
+        return (int(version[0]),)
+
+
+def _get_custom_interpreter(
+    implementation: Optional[str] = None, version: Optional[str] = None
+) -> str:
+    if implementation is None:
+        implementation = interpreter_name()
+    if version is None:
+        version = interpreter_version()
+    return f"{implementation}{version}"
+
+
+def get_supported(
+    version: Optional[str] = None,
+    platforms: Optional[List[str]] = None,
+    impl: Optional[str] = None,
+    abis: Optional[List[str]] = None,
+) -> List[Tag]:
+    """Return a list of supported tags for each version specified in
+    `versions`.
+
+    :param version: a string version, of the form "33" or "32",
+        or None. The version will be assumed to support our ABI.
+    :param platform: specify a list of platforms you want valid
+        tags for, or None. If None, use the local system platform.
+    :param impl: specify the exact implementation you want valid
+        tags for, or None. If None, use the local interpreter impl.
+    :param abis: specify a list of abis you want valid
+        tags for, or None. If None, use the local interpreter abi.
+    """
+    supported: List[Tag] = []
+
+    python_version: Optional[PythonVersion] = None
+    if version is not None:
+        python_version = _get_python_version(version)
+
+    interpreter = _get_custom_interpreter(impl, version)
+
+    platforms = _expand_allowed_platforms(platforms)
+
+    is_cpython = (impl or interpreter_name()) == "cp"
+    if is_cpython:
+        supported.extend(
+            cpython_tags(
+                python_version=python_version,
+                abis=abis,
+                platforms=platforms,
+            )
+        )
+    else:
+        supported.extend(
+            generic_tags(
+                interpreter=interpreter,
+                abis=abis,
+                platforms=platforms,
+            )
+        )
+    supported.extend(
+        compatible_tags(
+            python_version=python_version,
+            interpreter=interpreter,
+            platforms=platforms,
+        )
+    )
+
+    return supported
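
The two tag streams merged by get_supported() can be inspected with the
standalone packaging library (vendored by pip as pip._vendor.packaging.tags).
A sketch, assuming packaging is installed:

    from packaging.tags import compatible_tags, cpython_tags

    # Interpreter-specific tags, most specific first (e.g. cp39-cp39-<plat>).
    for tag in list(cpython_tags(python_version=(3, 9)))[:3]:
        print(tag)
    # Generic fallbacks (e.g. py39-none-<plat>, down to py3-none-any).
    for tag in list(compatible_tags(python_version=(3, 9)))[:3]:
        print(tag)
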
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/datetime.py b/venv/lib/python3.9/site-packages/pip/_internal/utils/datetime.py
new file mode 100644
index 0000000..8668b3b
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/datetime.py
@@ -0,0 +1,11 @@
+"""For when pip wants to check the date or time.
+"""
+
+import datetime
+
+
+def today_is_later_than(year: int, month: int, day: int) -> bool:
+    today = datetime.date.today()
+    given = datetime.date(year, month, day)
+
+    return today > given
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/deprecation.py b/venv/lib/python3.9/site-packages/pip/_internal/utils/deprecation.py
new file mode 100644
index 0000000..18e9be9
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/deprecation.py
@@ -0,0 +1,188 @@
+"""
+A module that implements tooling to enable easy warnings about deprecations.
+"""
+
+import logging
+import warnings
+from typing import Any, Optional, TextIO, Type, Union
+
+from pip._vendor.packaging.version import parse
+
+from pip import __version__ as current_version  # NOTE: tests patch this name.
+
+DEPRECATION_MSG_PREFIX = "DEPRECATION: "
+
+
+class PipDeprecationWarning(Warning):
+    pass
+
+
+_original_showwarning: Any = None
+
+
+# Warnings <-> Logging Integration
+def _showwarning(
+    message: Union[Warning, str],
+    category: Type[Warning],
+    filename: str,
+    lineno: int,
+    file: Optional[TextIO] = None,
+    line: Optional[str] = None,
+) -> None:
+    if file is not None:
+        if _original_showwarning is not None:
+            _original_showwarning(message, category, filename, lineno, file, line)
+    elif issubclass(category, PipDeprecationWarning):
+        # We use a specially named logger which will handle all of the
+        # deprecation messages for pip.
+        logger = logging.getLogger("pip._internal.deprecations")
+        logger.warning(message)
+    else:
+        _original_showwarning(message, category, filename, lineno, file, line)
+
+
+def install_warning_logger() -> None:
+    # Enable our Deprecation Warnings
+    warnings.simplefilter("default", PipDeprecationWarning, append=True)
+
+    global _original_showwarning
+
+    if _original_showwarning is None:
+        _original_showwarning = warnings.showwarning
+        warnings.showwarning = _showwarning
+
+
+def deprecated(
+    *,
+    reason: str,
+    replacement: Optional[str],
+    gone_in: Optional[str],
+    feature_flag: Optional[str] = None,
+    issue: Optional[int] = None,
+) -> None:
+    """Helper to deprecate existing functionality.
+
+    reason:
+        Textual reason shown to the user about why this functionality has
+        been deprecated. Should be a complete sentence.
+    replacement:
+        Textual suggestion shown to the user about what alternative
+        functionality they can use.
+    gone_in:
+        The version of pip in which this functionality should get removed.
+        Raises an error if pip's current version is greater than or equal to
+        this.
+    feature_flag:
+        Command-line flag of the form --use-feature={feature_flag} for testing
+        upcoming functionality.
+    issue:
+        Issue number on the tracker that would serve as a useful place for
+        users to find related discussion and provide feedback.
+    """
+
+    # Determine whether or not the feature is already gone in this version.
+    is_gone = gone_in is not None and parse(current_version) >= parse(gone_in)
+
+    message_parts = [
+        (reason, f"{DEPRECATION_MSG_PREFIX}{{}}"),
+        (
+            gone_in,
+            "pip {} will enforce this behaviour change."
+            if not is_gone
+            else "Since pip {}, this is no longer supported.",
+        ),
+        (
+            replacement,
+            "A possible replacement is {}.",
+        ),
+        (
+            feature_flag,
+            "You can use the flag --use-feature={} to test the upcoming behaviour."
+            if not is_gone
+            else None,
+        ),
+        (
+            issue,
+            "Discussion can be found at https://github.com/pypa/pip/issues/{}",
+        ),
+    ]
+
+    message = " ".join(
+        format_str.format(value)
+        for value, format_str in message_parts
+        if format_str is not None and value is not None
+    )
+
+    # Raise as an error if this behaviour is deprecated.
+    if is_gone:
+        raise PipDeprecationWarning(message)
+
+    warnings.warn(message, category=PipDeprecationWarning, stacklevel=2)
+
+
+class LegacyInstallReason:
+    def __init__(
+        self,
+        reason: str,
+        replacement: Optional[str] = None,
+        gone_in: Optional[str] = None,
+        feature_flag: Optional[str] = None,
+        issue: Optional[int] = None,
+        emit_after_success: bool = False,
+        emit_before_install: bool = False,
+    ):
+        self._reason = reason
+        self._replacement = replacement
+        self._gone_in = gone_in
+        self._feature_flag = feature_flag
+        self._issue = issue
+        self.emit_after_success = emit_after_success
+        self.emit_before_install = emit_before_install
+
+    def emit_deprecation(self, name: str) -> None:
+        deprecated(
+            reason=self._reason.format(name=name),
+            replacement=self._replacement,
+            gone_in=self._gone_in,
+            feature_flag=self._feature_flag,
+            issue=self._issue,
+        )
+
+
+LegacyInstallReasonFailedBdistWheel = LegacyInstallReason(
+    reason=(
+        "{name} was installed using the legacy 'setup.py install' "
+        "method, because a wheel could not be built for it."
+    ),
+    replacement="to fix the wheel build issue reported above",
+    gone_in="23.1",
+    issue=8368,
+    emit_after_success=True,
+)
+
+
+LegacyInstallReasonMissingWheelPackage = LegacyInstallReason(
+    reason=(
+        "{name} is being installed using the legacy "
+        "'setup.py install' method, because it does not have a "
+        "'pyproject.toml' and the 'wheel' package "
+        "is not installed."
+    ),
+    replacement="to enable the '--use-pep517' option",
+    gone_in="23.1",
+    issue=8559,
+    emit_before_install=True,
+)
+
+LegacyInstallReasonNoBinaryForcesSetuptoolsInstall = LegacyInstallReason(
+    reason=(
+        "{name} is being installed using the legacy "
+        "'setup.py install' method, because the '--no-binary' option was enabled "
+        "for it and this currently disables local wheel building for projects that "
+        "don't have a 'pyproject.toml' file."
+    ),
+    replacement="to enable the '--use-pep517' option",
+    gone_in="23.1",
+    issue=11451,
+    emit_before_install=True,
+)
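
The warnings-to-logging redirection set up by install_warning_logger() can be
reproduced standalone; MyDeprecationWarning below is hypothetical:

    import logging
    import warnings

    class MyDeprecationWarning(Warning):
        pass

    logging.basicConfig(level=logging.WARNING)
    _original = warnings.showwarning

    def _showwarning(message, category, filename, lineno, file=None, line=None):
        # Route our deprecation warnings to a logger; defer everything else.
        if file is None and issubclass(category, MyDeprecationWarning):
            logging.getLogger("demo.deprecations").warning(message)
        else:
            _original(message, category, filename, lineno, file, line)

    warnings.simplefilter("default", MyDeprecationWarning, append=True)
    warnings.showwarning = _showwarning
    warnings.warn("the old flag is deprecated", MyDeprecationWarning, stacklevel=2)
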
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/direct_url_helpers.py b/venv/lib/python3.9/site-packages/pip/_internal/utils/direct_url_helpers.py
new file mode 100644
index 0000000..0e8e5e1
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/direct_url_helpers.py
@@ -0,0 +1,87 @@
+from typing import Optional
+
+from pip._internal.models.direct_url import ArchiveInfo, DirectUrl, DirInfo, VcsInfo
+from pip._internal.models.link import Link
+from pip._internal.utils.urls import path_to_url
+from pip._internal.vcs import vcs
+
+
+def direct_url_as_pep440_direct_reference(direct_url: DirectUrl, name: str) -> str:
+    """Convert a DirectUrl to a pip requirement string."""
+    direct_url.validate()  # if invalid, this is a pip bug
+    requirement = name + " @ "
+    fragments = []
+    if isinstance(direct_url.info, VcsInfo):
+        requirement += "{}+{}@{}".format(
+            direct_url.info.vcs, direct_url.url, direct_url.info.commit_id
+        )
+    elif isinstance(direct_url.info, ArchiveInfo):
+        requirement += direct_url.url
+        if direct_url.info.hash:
+            fragments.append(direct_url.info.hash)
+    else:
+        assert isinstance(direct_url.info, DirInfo)
+        requirement += direct_url.url
+    if direct_url.subdirectory:
+        fragments.append("subdirectory=" + direct_url.subdirectory)
+    if fragments:
+        requirement += "#" + "&".join(fragments)
+    return requirement
+
+
+def direct_url_for_editable(source_dir: str) -> DirectUrl:
+    return DirectUrl(
+        url=path_to_url(source_dir),
+        info=DirInfo(editable=True),
+    )
+
+
+def direct_url_from_link(
+    link: Link, source_dir: Optional[str] = None, link_is_in_wheel_cache: bool = False
+) -> DirectUrl:
+    if link.is_vcs:
+        vcs_backend = vcs.get_backend_for_scheme(link.scheme)
+        assert vcs_backend
+        url, requested_revision, _ = vcs_backend.get_url_rev_and_auth(
+            link.url_without_fragment
+        )
+        # For VCS links, we need to find out and add commit_id.
+        if link_is_in_wheel_cache:
+            # If the requested VCS link corresponds to a cached
+            # wheel, it means the requested revision was an
+            # immutable commit hash, otherwise it would not have
+            # been cached. In that case we don't have a source_dir
+            # with the VCS checkout.
+            assert requested_revision
+            commit_id = requested_revision
+        else:
+            # If the wheel was not in cache, it means we have
+            # had to checkout from VCS to build and we have a source_dir
+            # which we can inspect to find out the commit id.
+            assert source_dir
+            commit_id = vcs_backend.get_revision(source_dir)
+        return DirectUrl(
+            url=url,
+            info=VcsInfo(
+                vcs=vcs_backend.name,
+                commit_id=commit_id,
+                requested_revision=requested_revision,
+            ),
+            subdirectory=link.subdirectory_fragment,
+        )
+    elif link.is_existing_dir():
+        return DirectUrl(
+            url=link.url_without_fragment,
+            info=DirInfo(),
+            subdirectory=link.subdirectory_fragment,
+        )
+    else:
+        hash = None
+        hash_name = link.hash_name
+        if hash_name:
+            hash = f"{hash_name}={link.hash}"
+        return DirectUrl(
+            url=link.url_without_fragment,
+            info=ArchiveInfo(hash=hash),
+            subdirectory=link.subdirectory_fragment,
+        )
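
The PEP 440 direct references built by direct_url_as_pep440_direct_reference()
round-trip through the standalone packaging library. A sketch with a
hypothetical project name and URL:

    from packaging.requirements import Requirement

    req = Requirement("mypkg @ git+https://example.com/repo.git@abc123")
    print(req.name)  # mypkg
    print(req.url)   # git+https://example.com/repo.git@abc123
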
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/distutils_args.py b/venv/lib/python3.9/site-packages/pip/_internal/utils/distutils_args.py
new file mode 100644
index 0000000..2fd1862
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/distutils_args.py
@@ -0,0 +1,43 @@
+from getopt import GetoptError, getopt
+from typing import Dict, List
+
+_options = [
+    "exec-prefix=",
+    "home=",
+    "install-base=",
+    "install-data=",
+    "install-headers=",
+    "install-lib=",
+    "install-platlib=",
+    "install-purelib=",
+    "install-scripts=",
+    "prefix=",
+    "root=",
+    "user",
+]
+
+
+def parse_distutils_args(args: List[str]) -> Dict[str, str]:
+    """Parse provided arguments, returning an object that has the matched arguments.
+
+    Any unknown arguments are ignored.
+    """
+    result = {}
+    for arg in args:
+        try:
+            parsed_opt, _ = getopt(args=[arg], shortopts="", longopts=_options)
+        except GetoptError:
+            # We don't care about any other options, which here may be
+            # considered unrecognized since our option list is not
+            # exhaustive.
+            continue
+
+        if not parsed_opt:
+            continue
+
+        option = parsed_opt[0]
+        name_from_parsed = option[0][2:].replace("-", "_")
+        value_from_parsed = option[1] or "true"
+        result[name_from_parsed] = value_from_parsed
+
+    return result
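
getopt() matches one long option at a time, which is why parse_distutils_args()
feeds it a single argument per call: an unknown option raises GetoptError for
that argument alone and can be skipped. A short sketch of the matching:

    from getopt import getopt

    parsed, _ = getopt(["--prefix=/opt/py"], shortopts="", longopts=["prefix="])
    print(parsed)  # [('--prefix', '/opt/py')]
    option = parsed[0]
    # Same normalization as above: strip "--", dash -> underscore, default "true".
    print(option[0][2:].replace("-", "_"), option[1] or "true")  # prefix /opt/py
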
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/egg_link.py b/venv/lib/python3.9/site-packages/pip/_internal/utils/egg_link.py
new file mode 100644
index 0000000..9e0da8d
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/egg_link.py
@@ -0,0 +1,75 @@
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+import os
+import re
+import sys
+from typing import Optional
+
+from pip._internal.locations import site_packages, user_site
+from pip._internal.utils.virtualenv import (
+    running_under_virtualenv,
+    virtualenv_no_global,
+)
+
+__all__ = [
+    "egg_link_path_from_sys_path",
+    "egg_link_path_from_location",
+]
+
+
+def _egg_link_name(raw_name: str) -> str:
+    """
+    Convert a Name metadata value to a .egg-link name, by applying
+    the same substitution as pkg_resources's safe_name function.
+    Note: we cannot use canonicalize_name because it has a different logic.
+    """
+    return re.sub("[^A-Za-z0-9.]+", "-", raw_name) + ".egg-link"
+
+
+def egg_link_path_from_sys_path(raw_name: str) -> Optional[str]:
+    """
+    Look for a .egg-link file for project name, by walking sys.path.
+    """
+    egg_link_name = _egg_link_name(raw_name)
+    for path_item in sys.path:
+        egg_link = os.path.join(path_item, egg_link_name)
+        if os.path.isfile(egg_link):
+            return egg_link
+    return None
+
+
+def egg_link_path_from_location(raw_name: str) -> Optional[str]:
+    """
+    Return the path for the .egg-link file if it exists, otherwise, None.
+
+    There are 3 scenarios:
+    1) not in a virtualenv
+       try to find in site.USER_SITE, then site_packages
+    2) in a no-global virtualenv
+       try to find in site_packages
+    3) in a yes-global virtualenv
+       try to find in site_packages, then site.USER_SITE
+       (don't look in global location)
+
+    For #1 and #3, there could be odd cases where there's an egg-link in two
+    locations.
+
+    This method will just return the first one found.
+    """
+    sites = []
+    if running_under_virtualenv():
+        sites.append(site_packages)
+        if not virtualenv_no_global() and user_site:
+            sites.append(user_site)
+    else:
+        if user_site:
+            sites.append(user_site)
+        sites.append(site_packages)
+
+    egg_link_name = _egg_link_name(raw_name)
+    for site in sites:
+        egglink = os.path.join(site, egg_link_name)
+        if os.path.isfile(egglink):
+            return egglink
+    return None
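
The name substitution in _egg_link_name() collapses every run of characters
outside [A-Za-z0-9.] to a single dash. A standalone sketch:

    import re

    def egg_link_name(raw_name):
        return re.sub("[^A-Za-z0-9.]+", "-", raw_name) + ".egg-link"

    print(egg_link_name("My_Project name"))  # My-Project-name.egg-link
    print(egg_link_name("zope.interface"))   # zope.interface.egg-link
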
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/encoding.py b/venv/lib/python3.9/site-packages/pip/_internal/utils/encoding.py
new file mode 100644
index 0000000..008f06a
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/encoding.py
@@ -0,0 +1,36 @@
+import codecs
+import locale
+import re
+import sys
+from typing import List, Tuple
+
+BOMS: List[Tuple[bytes, str]] = [
+    (codecs.BOM_UTF8, "utf-8"),
+    (codecs.BOM_UTF16, "utf-16"),
+    (codecs.BOM_UTF16_BE, "utf-16-be"),
+    (codecs.BOM_UTF16_LE, "utf-16-le"),
+    (codecs.BOM_UTF32, "utf-32"),
+    (codecs.BOM_UTF32_BE, "utf-32-be"),
+    (codecs.BOM_UTF32_LE, "utf-32-le"),
+]
+
+ENCODING_RE = re.compile(rb"coding[:=]\s*([-\w.]+)")
+
+
+def auto_decode(data: bytes) -> str:
+    """Check a bytes string for a BOM to correctly detect the encoding
+
+    Falls back to locale.getpreferredencoding(False), like open() on Python 3."""
+    for bom, encoding in BOMS:
+        if data.startswith(bom):
+            return data[len(bom) :].decode(encoding)
+    # Let's check the first two lines, as in PEP 263
+    for line in data.split(b"\n")[:2]:
+        if line[0:1] == b"#":
+            result = ENCODING_RE.search(line)
+            if result is not None:
+                encoding = result.group(1).decode("ascii")
+                return data.decode(encoding)
+    return data.decode(
+        locale.getpreferredencoding(False) or sys.getdefaultencoding(),
+    )
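+
+
+# A minimal self-check of the two detection paths above (illustrative only):
+if __name__ == "__main__":
+    # BOM path: the UTF-8 BOM is stripped and the rest decoded as UTF-8.
+    assert auto_decode(codecs.BOM_UTF8 + "café".encode("utf-8")) == "café"
+    # PEP 263 path: the coding declaration in the first line wins.
+    source = b"# -*- coding: latin-1 -*-\nna\xefve"
+    assert auto_decode(source) == "# -*- coding: latin-1 -*-\nna\u00efve"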
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/entrypoints.py b/venv/lib/python3.9/site-packages/pip/_internal/utils/entrypoints.py
new file mode 100644
index 0000000..1501369
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/entrypoints.py
@@ -0,0 +1,84 @@
+import itertools
+import os
+import shutil
+import sys
+from typing import List, Optional
+
+from pip._internal.cli.main import main
+from pip._internal.utils.compat import WINDOWS
+
+_EXECUTABLE_NAMES = [
+    "pip",
+    f"pip{sys.version_info.major}",
+    f"pip{sys.version_info.major}.{sys.version_info.minor}",
+]
+if WINDOWS:
+    _allowed_extensions = {"", ".exe"}
+    _EXECUTABLE_NAMES = [
+        "".join(parts)
+        for parts in itertools.product(_EXECUTABLE_NAMES, _allowed_extensions)
+    ]
+
+
+def _wrapper(args: Optional[List[str]] = None) -> int:
+    """Central wrapper for all old entrypoints.
+
+    Historically pip has had several entrypoints defined. Because of issues
+    arising from PATH, sys.path, multiple Pythons, their interactions, and most
+    of them having a pip installed, users suffer every time an entrypoint gets
+    moved.
+
+    To alleviate this pain, and provide a mechanism for warning users and
+    directing them to an appropriate place for help, we now define all of
+    our old entrypoints as wrappers for the current one.
+    """
+    sys.stderr.write(
+        "WARNING: pip is being invoked by an old script wrapper. This will "
+        "fail in a future version of pip.\n"
+        "Please see https://github.com/pypa/pip/issues/5599 for advice on "
+        "fixing the underlying issue.\n"
+        "To avoid this problem you can invoke Python with '-m pip' instead of "
+        "running pip directly.\n"
+    )
+    return main(args)
+
+
+def get_best_invocation_for_this_pip() -> str:
+    """Try to figure out the best way to invoke pip in the current environment."""
+    binary_directory = "Scripts" if WINDOWS else "bin"
+    binary_prefix = os.path.join(sys.prefix, binary_directory)
+
+    # Try to use pip[X[.Y]] names, if those executables for this environment are
+    # the first on PATH with that name.
+    path_parts = os.path.normcase(os.environ.get("PATH", "")).split(os.pathsep)
+    exe_are_in_PATH = os.path.normcase(binary_prefix) in path_parts
+    if exe_are_in_PATH:
+        for exe_name in _EXECUTABLE_NAMES:
+            found_executable = shutil.which(exe_name)
+            binary_executable = os.path.join(binary_prefix, exe_name)
+            if (
+                found_executable
+                and os.path.exists(binary_executable)
+                and os.path.samefile(
+                    found_executable,
+                    binary_executable,
+                )
+            ):
+                return exe_name
+
+    # Use the `-m` invocation, if there's no "nice" invocation.
+    return f"{get_best_invocation_for_this_python()} -m pip"
+
+
+def get_best_invocation_for_this_python() -> str:
+    """Try to figure out the best way to invoke the current Python."""
+    exe = sys.executable
+    exe_name = os.path.basename(exe)
+
+    # Try to use the basename, if it's the first executable.
+    found_executable = shutil.which(exe_name)
+    if found_executable and os.path.samefile(found_executable, exe):
+        return exe_name
+
+    # Use the full executable name, because we couldn't find something simpler.
+    return exe
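+
+
+# Illustrative: print the friendliest invocations for the current environment,
+# e.g. "pip3.9" and "python3" when those resolve first on PATH.
+if __name__ == "__main__":
+    print(get_best_invocation_for_this_python())
+    print(get_best_invocation_for_this_pip())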
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/filesystem.py b/venv/lib/python3.9/site-packages/pip/_internal/utils/filesystem.py
new file mode 100644
index 0000000..83c2df7
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/filesystem.py
@@ -0,0 +1,153 @@
+import fnmatch
+import os
+import os.path
+import random
+import sys
+from contextlib import contextmanager
+from tempfile import NamedTemporaryFile
+from typing import Any, BinaryIO, Generator, List, Union, cast
+
+from pip._vendor.tenacity import retry, stop_after_delay, wait_fixed
+
+from pip._internal.utils.compat import get_path_uid
+from pip._internal.utils.misc import format_size
+
+
+def check_path_owner(path: str) -> bool:
+    # If we don't have a way to check the effective uid of this process, then
+    # we'll just assume that we own the directory.
+    if sys.platform == "win32" or not hasattr(os, "geteuid"):
+        return True
+
+    assert os.path.isabs(path)
+
+    previous = None
+    while path != previous:
+        if os.path.lexists(path):
+            # Check if path is writable by current user.
+            if os.geteuid() == 0:
+                # Special handling for root user in order to handle properly
+                # cases where users use sudo without -H flag.
+                try:
+                    path_uid = get_path_uid(path)
+                except OSError:
+                    return False
+                return path_uid == 0
+            else:
+                return os.access(path, os.W_OK)
+        else:
+            previous, path = path, os.path.dirname(path)
+    return False  # assume we don't own the path
+
+
+@contextmanager
+def adjacent_tmp_file(path: str, **kwargs: Any) -> Generator[BinaryIO, None, None]:
+    """Return a file-like object pointing to a tmp file next to path.
+
+    The file is created securely and is ensured to be written to disk
+    after the context reaches its end.
+
+    kwargs will be passed to tempfile.NamedTemporaryFile to control
+    the way the temporary file will be opened.
+    """
+    with NamedTemporaryFile(
+        delete=False,
+        dir=os.path.dirname(path),
+        prefix=os.path.basename(path),
+        suffix=".tmp",
+        **kwargs,
+    ) as f:
+        result = cast(BinaryIO, f)
+        try:
+            yield result
+        finally:
+            result.flush()
+            os.fsync(result.fileno())
+
+
+# Tenacity raises RetryError by default, explicitly raise the original exception
+_replace_retry = retry(reraise=True, stop=stop_after_delay(1), wait=wait_fixed(0.25))
+
+replace = _replace_retry(os.replace)
+
+
+# test_writable_dir and _test_writable_dir_win are copied from Flit,
+# with the author's agreement to also place them under pip's license.
+def test_writable_dir(path: str) -> bool:
+    """Check if a directory is writable.
+
+    Uses os.access() on POSIX, tries creating files on Windows.
+    """
+    # If the directory doesn't exist, find the closest parent that does.
+    while not os.path.isdir(path):
+        parent = os.path.dirname(path)
+        if parent == path:
+            break  # Should never get here, but infinite loops are bad
+        path = parent
+
+    if os.name == "posix":
+        return os.access(path, os.W_OK)
+
+    return _test_writable_dir_win(path)
+
+
+def _test_writable_dir_win(path: str) -> bool:
+    # os.access doesn't work on Windows: http://bugs.python.org/issue2528
+    # and we can't use tempfile: http://bugs.python.org/issue22107
+    basename = "accesstest_deleteme_fishfingers_custard_"
+    alphabet = "abcdefghijklmnopqrstuvwxyz0123456789"
+    for _ in range(10):
+        name = basename + "".join(random.choice(alphabet) for _ in range(6))
+        file = os.path.join(path, name)
+        try:
+            fd = os.open(file, os.O_RDWR | os.O_CREAT | os.O_EXCL)
+        except FileExistsError:
+            pass
+        except PermissionError:
+            # This could be because there's a directory with the same name.
+            # But it's highly unlikely there's a directory called that,
+            # so we'll assume it's because the parent dir is not writable.
+            # This could also be because the parent dir is not readable,
+            # due to non-privileged user access.
+            return False
+        else:
+            os.close(fd)
+            os.unlink(file)
+            return True
+
+    # This should never be reached
+    raise OSError("Unexpected condition testing for writable directory")
+
+
+def find_files(path: str, pattern: str) -> List[str]:
+    """Returns a list of absolute paths of files beneath path, recursively,
+    with filenames which match the UNIX-style shell glob pattern."""
+    result: List[str] = []
+    for root, _, files in os.walk(path):
+        matches = fnmatch.filter(files, pattern)
+        result.extend(os.path.join(root, f) for f in matches)
+    return result
+
+
+def file_size(path: str) -> Union[int, float]:
+    # If it's a symlink, return 0.
+    if os.path.islink(path):
+        return 0
+    return os.path.getsize(path)
+
+
+def format_file_size(path: str) -> str:
+    return format_size(file_size(path))
+
+
+def directory_size(path: str) -> Union[int, float]:
+    size = 0.0
+    for root, _dirs, files in os.walk(path):
+        for filename in files:
+            file_path = os.path.join(root, filename)
+            size += file_size(file_path)
+    return size
+
+
+def format_directory_size(path: str) -> str:
+    return format_size(directory_size(path))
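+
+
+# Sketch of the write-then-replace pattern these helpers enable (the target
+# path here is made up for the example): write to an adjacent temp file that
+# is fsync'd on exit, then atomically move it into place with the retrying
+# replace() defined above.
+if __name__ == "__main__":
+    import tempfile
+
+    target = os.path.join(tempfile.mkdtemp(), "data.bin")
+    with adjacent_tmp_file(target) as fp:
+        fp.write(b"payload")
+    replace(fp.name, target)
+    print(format_file_size(target))  # "7 bytes"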
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/filetypes.py b/venv/lib/python3.9/site-packages/pip/_internal/utils/filetypes.py
new file mode 100644
index 0000000..5948570
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/filetypes.py
@@ -0,0 +1,27 @@
+"""Filetype information.
+"""
+
+from typing import Tuple
+
+from pip._internal.utils.misc import splitext
+
+WHEEL_EXTENSION = ".whl"
+BZ2_EXTENSIONS: Tuple[str, ...] = (".tar.bz2", ".tbz")
+XZ_EXTENSIONS: Tuple[str, ...] = (
+    ".tar.xz",
+    ".txz",
+    ".tlz",
+    ".tar.lz",
+    ".tar.lzma",
+)
+ZIP_EXTENSIONS: Tuple[str, ...] = (".zip", WHEEL_EXTENSION)
+TAR_EXTENSIONS: Tuple[str, ...] = (".tar.gz", ".tgz", ".tar")
+ARCHIVE_EXTENSIONS = ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS
+
+
+def is_archive_file(name: str) -> bool:
+    """Return True if `name` is a considered as an archive file."""
+    ext = splitext(name)[1].lower()
+    if ext in ARCHIVE_EXTENSIONS:
+        return True
+    return False
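+
+
+# Quick self-check of the extension table (illustrative):
+if __name__ == "__main__":
+    assert is_archive_file("pkg-1.0.tar.gz")  # ".tar.gz" via splitext()
+    assert is_archive_file("pkg-1.0-py3-none-any.whl")
+    assert not is_archive_file("pkg-1.0.txt")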
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/glibc.py b/venv/lib/python3.9/site-packages/pip/_internal/utils/glibc.py
new file mode 100644
index 0000000..7bd3c20
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/glibc.py
@@ -0,0 +1,88 @@
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+import os
+import sys
+from typing import Optional, Tuple
+
+
+def glibc_version_string() -> Optional[str]:
+    "Returns glibc version string, or None if not using glibc."
+    return glibc_version_string_confstr() or glibc_version_string_ctypes()
+
+
+def glibc_version_string_confstr() -> Optional[str]:
+    "Primary implementation of glibc_version_string using os.confstr."
+    # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
+    # to be broken or missing. This strategy is used in the standard library
+    # platform module:
+    # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183
+    if sys.platform == "win32":
+        return None
+    try:
+        # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17":
+        _, version = os.confstr("CS_GNU_LIBC_VERSION").split()
+    except (AttributeError, OSError, ValueError):
+        # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
+        return None
+    return version
+
+
+def glibc_version_string_ctypes() -> Optional[str]:
+    "Fallback implementation of glibc_version_string using ctypes."
+
+    try:
+        import ctypes
+    except ImportError:
+        return None
+
+    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
+    # manpage says, "If filename is NULL, then the returned handle is for the
+    # main program". This way we can let the linker do the work to figure out
+    # which libc our process is actually using.
+    process_namespace = ctypes.CDLL(None)
+    try:
+        gnu_get_libc_version = process_namespace.gnu_get_libc_version
+    except AttributeError:
+        # Symbol doesn't exist -> therefore, we are not linked to
+        # glibc.
+        return None
+
+    # Call gnu_get_libc_version, which returns a string like "2.5"
+    gnu_get_libc_version.restype = ctypes.c_char_p
+    version_str = gnu_get_libc_version()
+    # ctypes returns bytes for a c_char_p restype on Python 3; decode to str.
+    if not isinstance(version_str, str):
+        version_str = version_str.decode("ascii")
+
+    return version_str
+
+
+# platform.libc_ver regularly returns completely nonsensical glibc
+# versions. E.g. on my computer, platform says:
+#
+#   ~$ python2.7 -c 'import platform; print(platform.libc_ver())'
+#   ('glibc', '2.7')
+#   ~$ python3.5 -c 'import platform; print(platform.libc_ver())'
+#   ('glibc', '2.9')
+#
+# But the truth is:
+#
+#   ~$ ldd --version
+#   ldd (Debian GLIBC 2.22-11) 2.22
+#
+# This is unfortunate, because it means that the linehaul data on libc
+# versions that was generated by pip 8.1.2 and earlier is useless and
+# misleading. Solution: instead of using platform, use our code that actually
+# works.
+def libc_ver() -> Tuple[str, str]:
+    """Try to determine the glibc version
+
+    Returns a tuple of strings (lib, version) which default to empty strings
+    in case the lookup fails.
+    """
+    glibc_version = glibc_version_string()
+    if glibc_version is None:
+        return ("", "")
+    else:
+        return ("glibc", glibc_version)
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/hashes.py b/venv/lib/python3.9/site-packages/pip/_internal/utils/hashes.py
new file mode 100644
index 0000000..7672730
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/hashes.py
@@ -0,0 +1,144 @@
+import hashlib
+from typing import TYPE_CHECKING, BinaryIO, Dict, Iterable, List, Optional
+
+from pip._internal.exceptions import HashMismatch, HashMissing, InstallationError
+from pip._internal.utils.misc import read_chunks
+
+if TYPE_CHECKING:
+    from hashlib import _Hash
+
+    # NoReturn introduced in 3.6.2; imported only for type checking to maintain
+    # pip compatibility with older patch versions of Python 3.6
+    from typing import NoReturn
+
+
+# The recommended hash algo of the moment. Change this whenever the state of
+# the art changes; it won't hurt backward compatibility.
+FAVORITE_HASH = "sha256"
+
+
+# Names of hashlib algorithms allowed by the --hash option and ``pip hash``
+# Currently, those are the ones at least as collision-resistant as sha256.
+STRONG_HASHES = ["sha256", "sha384", "sha512"]
+
+
+class Hashes:
+    """A wrapper that builds multiple hashes at once and checks them against
+    known-good values
+
+    """
+
+    def __init__(self, hashes: Optional[Dict[str, List[str]]] = None) -> None:
+        """
+        :param hashes: A dict of algorithm names pointing to lists of allowed
+            hex digests
+        """
+        allowed = {}
+        if hashes is not None:
+            for alg, keys in hashes.items():
+                # Make sure values are always sorted (to ease equality checks)
+                allowed[alg] = sorted(keys)
+        self._allowed = allowed
+
+    def __and__(self, other: "Hashes") -> "Hashes":
+        if not isinstance(other, Hashes):
+            return NotImplemented
+
+        # If either of the Hashes objects is entirely empty (i.e. no hash
+        # specified at all), all hashes from the other object are allowed.
+        if not other:
+            return self
+        if not self:
+            return other
+
+        # Otherwise only hashes that are present in both objects are allowed.
+        new = {}
+        for alg, values in other._allowed.items():
+            if alg not in self._allowed:
+                continue
+            new[alg] = [v for v in values if v in self._allowed[alg]]
+        return Hashes(new)
+
+    @property
+    def digest_count(self) -> int:
+        return sum(len(digests) for digests in self._allowed.values())
+
+    def is_hash_allowed(self, hash_name: str, hex_digest: str) -> bool:
+        """Return whether the given hex digest is allowed."""
+        return hex_digest in self._allowed.get(hash_name, [])
+
+    def check_against_chunks(self, chunks: Iterable[bytes]) -> None:
+        """Check good hashes against ones built from iterable of chunks of
+        data.
+
+        Raise HashMismatch if none match.
+
+        """
+        gots = {}
+        for hash_name in self._allowed.keys():
+            try:
+                gots[hash_name] = hashlib.new(hash_name)
+            except (ValueError, TypeError):
+                raise InstallationError(f"Unknown hash name: {hash_name}")
+
+        for chunk in chunks:
+            for hash in gots.values():
+                hash.update(chunk)
+
+        for hash_name, got in gots.items():
+            if got.hexdigest() in self._allowed[hash_name]:
+                return
+        self._raise(gots)
+
+    def _raise(self, gots: Dict[str, "_Hash"]) -> "NoReturn":
+        raise HashMismatch(self._allowed, gots)
+
+    def check_against_file(self, file: BinaryIO) -> None:
+        """Check good hashes against a file-like object
+
+        Raise HashMismatch if none match.
+
+        """
+        return self.check_against_chunks(read_chunks(file))
+
+    def check_against_path(self, path: str) -> None:
+        with open(path, "rb") as file:
+            return self.check_against_file(file)
+
+    def __bool__(self) -> bool:
+        """Return whether I know any known-good hashes."""
+        return bool(self._allowed)
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, Hashes):
+            return NotImplemented
+        return self._allowed == other._allowed
+
+    def __hash__(self) -> int:
+        return hash(
+            ",".join(
+                sorted(
+                    ":".join((alg, digest))
+                    for alg, digest_list in self._allowed.items()
+                    for digest in digest_list
+                )
+            )
+        )
+
+
+class MissingHashes(Hashes):
+    """A workalike for Hashes used when we're missing a hash for a requirement
+
+    It computes the actual hash of the requirement and raises a HashMissing
+    exception showing it to the user.
+
+    """
+
+    def __init__(self) -> None:
+        """Don't offer the ``hashes`` kwarg."""
+        # Pass our favorite hash in to generate a "gotten hash". With the
+        # empty list, it will never match, so an error is always raised.
+        super().__init__(hashes={FAVORITE_HASH: []})
+
+    def _raise(self, gots: Dict[str, "_Hash"]) -> "NoReturn":
+        raise HashMissing(gots[FAVORITE_HASH].hexdigest())
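+
+
+# Minimal sketch of the verification flow (the digest is computed inline, so
+# the example is self-contained):
+if __name__ == "__main__":
+    digest = hashlib.sha256(b"hello world").hexdigest()
+    good = Hashes({FAVORITE_HASH: [digest]})
+    good.check_against_chunks([b"hello ", b"world"])  # matches; no exception
+    assert good.is_hash_allowed(FAVORITE_HASH, digest)
+    try:
+        good.check_against_chunks([b"tampered"])
+    except HashMismatch:
+        print("mismatch detected, as expected")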
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/inject_securetransport.py b/venv/lib/python3.9/site-packages/pip/_internal/utils/inject_securetransport.py
new file mode 100644
index 0000000..276aa79
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/inject_securetransport.py
@@ -0,0 +1,35 @@
+"""A helper module that injects SecureTransport, on import.
+
+The import should be done as early as possible, to ensure all requests and
+sessions (or whatever) are created after injecting SecureTransport.
+
+Note that we only do the injection on macOS, when the linked OpenSSL is too
+old to handle TLSv1.2.
+"""
+
+import sys
+
+
+def inject_securetransport() -> None:
+    # Only relevant on macOS
+    if sys.platform != "darwin":
+        return
+
+    try:
+        import ssl
+    except ImportError:
+        return
+
+    # OpenSSL 1.0.1 and later can handle TLSv1.2, so no injection is needed.
+    if ssl.OPENSSL_VERSION_NUMBER >= 0x1000100F:
+        return
+
+    try:
+        from pip._vendor.urllib3.contrib import securetransport
+    except (ImportError, OSError):
+        return
+
+    securetransport.inject_into_urllib3()
+
+
+inject_securetransport()
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/logging.py b/venv/lib/python3.9/site-packages/pip/_internal/utils/logging.py
new file mode 100644
index 0000000..c10e1f4
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/logging.py
@@ -0,0 +1,348 @@
+import contextlib
+import errno
+import logging
+import logging.config
+import logging.handlers
+import os
+import sys
+import threading
+from dataclasses import dataclass
+from io import TextIOWrapper
+from logging import Filter
+from typing import Any, ClassVar, Generator, List, Optional, TextIO, Type
+
+from pip._vendor.rich.console import (
+    Console,
+    ConsoleOptions,
+    ConsoleRenderable,
+    RenderableType,
+    RenderResult,
+    RichCast,
+)
+from pip._vendor.rich.highlighter import NullHighlighter
+from pip._vendor.rich.logging import RichHandler
+from pip._vendor.rich.segment import Segment
+from pip._vendor.rich.style import Style
+
+from pip._internal.utils._log import VERBOSE, getLogger
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.deprecation import DEPRECATION_MSG_PREFIX
+from pip._internal.utils.misc import ensure_dir
+
+_log_state = threading.local()
+subprocess_logger = getLogger("pip.subprocessor")
+
+
+class BrokenStdoutLoggingError(Exception):
+    """
+    Raised if BrokenPipeError occurs for the stdout stream while logging.
+    """
+
+
+def _is_broken_pipe_error(exc_class: Type[BaseException], exc: BaseException) -> bool:
+    if exc_class is BrokenPipeError:
+        return True
+
+    # On Windows, a broken pipe can show up as EINVAL rather than EPIPE:
+    # https://bugs.python.org/issue19612
+    # https://bugs.python.org/issue30418
+    if not WINDOWS:
+        return False
+
+    return isinstance(exc, OSError) and exc.errno in (errno.EINVAL, errno.EPIPE)
+
+
+@contextlib.contextmanager
+def indent_log(num: int = 2) -> Generator[None, None, None]:
+    """
+    A context manager which will cause the log output to be indented for any
+    log messages emitted inside it.
+    """
+    # For thread-safety
+    _log_state.indentation = get_indentation()
+    _log_state.indentation += num
+    try:
+        yield
+    finally:
+        _log_state.indentation -= num
+
+
+def get_indentation() -> int:
+    return getattr(_log_state, "indentation", 0)
+
+
+class IndentingFormatter(logging.Formatter):
+    default_time_format = "%Y-%m-%dT%H:%M:%S"
+
+    def __init__(
+        self,
+        *args: Any,
+        add_timestamp: bool = False,
+        **kwargs: Any,
+    ) -> None:
+        """
+        A logging.Formatter that obeys the indent_log() context manager.
+
+        :param add_timestamp: A bool indicating output lines should be prefixed
+            with their record's timestamp.
+        """
+        self.add_timestamp = add_timestamp
+        super().__init__(*args, **kwargs)
+
+    def get_message_start(self, formatted: str, levelno: int) -> str:
+        """
+        Return the start of the formatted log message (not counting the
+        prefix to add to each line).
+        """
+        if levelno < logging.WARNING:
+            return ""
+        if formatted.startswith(DEPRECATION_MSG_PREFIX):
+            # Then the message already has a prefix.  We don't want it to
+            # look like "WARNING: DEPRECATION: ...."
+            return ""
+        if levelno < logging.ERROR:
+            return "WARNING: "
+
+        return "ERROR: "
+
+    def format(self, record: logging.LogRecord) -> str:
+        """
+        Calls the standard formatter, but will indent all of the log message
+        lines by our current indentation level.
+        """
+        formatted = super().format(record)
+        message_start = self.get_message_start(formatted, record.levelno)
+        formatted = message_start + formatted
+
+        prefix = ""
+        if self.add_timestamp:
+            prefix = f"{self.formatTime(record)} "
+        prefix += " " * get_indentation()
+        formatted = "".join([prefix + line for line in formatted.splitlines(True)])
+        return formatted
+
+
+@dataclass
+class IndentedRenderable:
+    renderable: RenderableType
+    indent: int
+
+    def __rich_console__(
+        self, console: Console, options: ConsoleOptions
+    ) -> RenderResult:
+        segments = console.render(self.renderable, options)
+        lines = Segment.split_lines(segments)
+        for line in lines:
+            yield Segment(" " * self.indent)
+            yield from line
+            yield Segment("\n")
+
+
+class RichPipStreamHandler(RichHandler):
+    KEYWORDS: ClassVar[Optional[List[str]]] = []
+
+    def __init__(self, stream: Optional[TextIO], no_color: bool) -> None:
+        super().__init__(
+            console=Console(file=stream, no_color=no_color, soft_wrap=True),
+            show_time=False,
+            show_level=False,
+            show_path=False,
+            highlighter=NullHighlighter(),
+        )
+
+    # Our custom override on Rich's logger, to make things work as we need them to.
+    def emit(self, record: logging.LogRecord) -> None:
+        style: Optional[Style] = None
+
+        # If we are given a diagnostic error to present, present it with indentation.
+        assert isinstance(record.args, tuple)
+        if record.msg == "[present-rich] %s" and len(record.args) == 1:
+            rich_renderable = record.args[0]
+            assert isinstance(
+                rich_renderable, (ConsoleRenderable, RichCast, str)
+            ), f"{rich_renderable} is not rich-console-renderable"
+
+            renderable: RenderableType = IndentedRenderable(
+                rich_renderable, indent=get_indentation()
+            )
+        else:
+            message = self.format(record)
+            renderable = self.render_message(record, message)
+            if record.levelno is not None:
+                if record.levelno >= logging.ERROR:
+                    style = Style(color="red")
+                elif record.levelno >= logging.WARNING:
+                    style = Style(color="yellow")
+
+        try:
+            self.console.print(renderable, overflow="ignore", crop=False, style=style)
+        except Exception:
+            self.handleError(record)
+
+    def handleError(self, record: logging.LogRecord) -> None:
+        """Called when logging is unable to log some output."""
+
+        exc_class, exc = sys.exc_info()[:2]
+        # If a broken pipe occurred while calling write() or flush() on the
+        # stdout stream in logging's Handler.emit(), then raise our special
+        # exception so we can handle it in main() instead of logging the
+        # broken pipe error and continuing.
+        if (
+            exc_class
+            and exc
+            and self.console.file is sys.stdout
+            and _is_broken_pipe_error(exc_class, exc)
+        ):
+            raise BrokenStdoutLoggingError()
+
+        return super().handleError(record)
+
+
+class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler):
+    def _open(self) -> TextIOWrapper:
+        ensure_dir(os.path.dirname(self.baseFilename))
+        return super()._open()
+
+
+class MaxLevelFilter(Filter):
+    def __init__(self, level: int) -> None:
+        self.level = level
+
+    def filter(self, record: logging.LogRecord) -> bool:
+        return record.levelno < self.level
+
+
+class ExcludeLoggerFilter(Filter):
+    """
+    A logging Filter that excludes records from a logger (or its children).
+    """
+
+    def filter(self, record: logging.LogRecord) -> bool:
+        # The base Filter class allows only records from a logger (or its
+        # children).
+        return not super().filter(record)
+
+
+def setup_logging(verbosity: int, no_color: bool, user_log_file: Optional[str]) -> int:
+    """Configures and sets up all of the logging
+
+    Returns the requested logging level, as its integer value.
+    """
+
+    # Determine the level to be logging at.
+    if verbosity >= 2:
+        level_number = logging.DEBUG
+    elif verbosity == 1:
+        level_number = VERBOSE
+    elif verbosity == -1:
+        level_number = logging.WARNING
+    elif verbosity == -2:
+        level_number = logging.ERROR
+    elif verbosity <= -3:
+        level_number = logging.CRITICAL
+    else:
+        level_number = logging.INFO
+
+    level = logging.getLevelName(level_number)
+
+    # The "root" logger should match the "console" level *unless* we also need
+    # to log to a user log file.
+    include_user_log = user_log_file is not None
+    if include_user_log:
+        additional_log_file = user_log_file
+        root_level = "DEBUG"
+    else:
+        additional_log_file = "/dev/null"
+        root_level = level
+
+    # Disable any logging besides WARNING for vendored libraries, unless
+    # DEBUG-level logging has been enabled.
+    vendored_log_level = "WARNING" if level in ["INFO", "ERROR"] else "DEBUG"
+
+    # Shorthands for clarity
+    log_streams = {
+        "stdout": "ext://sys.stdout",
+        "stderr": "ext://sys.stderr",
+    }
+    handler_classes = {
+        "stream": "pip._internal.utils.logging.RichPipStreamHandler",
+        "file": "pip._internal.utils.logging.BetterRotatingFileHandler",
+    }
+    handlers = ["console", "console_errors", "console_subprocess"] + (
+        ["user_log"] if include_user_log else []
+    )
+
+    logging.config.dictConfig(
+        {
+            "version": 1,
+            "disable_existing_loggers": False,
+            "filters": {
+                "exclude_warnings": {
+                    "()": "pip._internal.utils.logging.MaxLevelFilter",
+                    "level": logging.WARNING,
+                },
+                "restrict_to_subprocess": {
+                    "()": "logging.Filter",
+                    "name": subprocess_logger.name,
+                },
+                "exclude_subprocess": {
+                    "()": "pip._internal.utils.logging.ExcludeLoggerFilter",
+                    "name": subprocess_logger.name,
+                },
+            },
+            "formatters": {
+                "indent": {
+                    "()": IndentingFormatter,
+                    "format": "%(message)s",
+                },
+                "indent_with_timestamp": {
+                    "()": IndentingFormatter,
+                    "format": "%(message)s",
+                    "add_timestamp": True,
+                },
+            },
+            "handlers": {
+                "console": {
+                    "level": level,
+                    "class": handler_classes["stream"],
+                    "no_color": no_color,
+                    "stream": log_streams["stdout"],
+                    "filters": ["exclude_subprocess", "exclude_warnings"],
+                    "formatter": "indent",
+                },
+                "console_errors": {
+                    "level": "WARNING",
+                    "class": handler_classes["stream"],
+                    "no_color": no_color,
+                    "stream": log_streams["stderr"],
+                    "filters": ["exclude_subprocess"],
+                    "formatter": "indent",
+                },
+                # A handler responsible for logging to the console messages
+                # from the "subprocessor" logger.
+                "console_subprocess": {
+                    "level": level,
+                    "class": handler_classes["stream"],
+                    "stream": log_streams["stderr"],
+                    "no_color": no_color,
+                    "filters": ["restrict_to_subprocess"],
+                    "formatter": "indent",
+                },
+                "user_log": {
+                    "level": "DEBUG",
+                    "class": handler_classes["file"],
+                    "filename": additional_log_file,
+                    "encoding": "utf-8",
+                    "delay": True,
+                    "formatter": "indent_with_timestamp",
+                },
+            },
+            "root": {
+                "level": root_level,
+                "handlers": handlers,
+            },
+            "loggers": {"pip._vendor": {"level": vendored_log_level}},
+        }
+    )
+
+    return level_number
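+
+
+# Sketch of how indent_log() and IndentingFormatter cooperate (wired to the
+# stdlib root logger here purely for illustration; pip itself configures this
+# via setup_logging() above):
+if __name__ == "__main__":
+    logging.basicConfig(level=logging.INFO)
+    logging.getLogger().handlers[0].setFormatter(IndentingFormatter("%(message)s"))
+    demo = logging.getLogger("demo")
+    demo.info("top level")
+    with indent_log():
+        demo.info("indented two spaces")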
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/misc.py b/venv/lib/python3.9/site-packages/pip/_internal/utils/misc.py
new file mode 100644
index 0000000..a8f4cb5
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/misc.py
@@ -0,0 +1,723 @@
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+import contextlib
+import errno
+import getpass
+import hashlib
+import io
+import logging
+import os
+import posixpath
+import shutil
+import stat
+import sys
+import urllib.parse
+from io import StringIO
+from itertools import filterfalse, tee, zip_longest
+from types import TracebackType
+from typing import (
+    Any,
+    BinaryIO,
+    Callable,
+    ContextManager,
+    Dict,
+    Generator,
+    Iterable,
+    Iterator,
+    List,
+    Optional,
+    TextIO,
+    Tuple,
+    Type,
+    TypeVar,
+    cast,
+)
+
+from pip._vendor.pep517 import Pep517HookCaller
+from pip._vendor.tenacity import retry, stop_after_delay, wait_fixed
+
+from pip import __version__
+from pip._internal.exceptions import CommandError
+from pip._internal.locations import get_major_minor_version
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+__all__ = [
+    "rmtree",
+    "display_path",
+    "backup_dir",
+    "ask",
+    "splitext",
+    "format_size",
+    "is_installable_dir",
+    "normalize_path",
+    "renames",
+    "get_prog",
+    "captured_stdout",
+    "ensure_dir",
+    "remove_auth_from_url",
+    "ConfiguredPep517HookCaller",
+]
+
+
+logger = logging.getLogger(__name__)
+
+T = TypeVar("T")
+ExcInfo = Tuple[Type[BaseException], BaseException, TracebackType]
+VersionInfo = Tuple[int, int, int]
+NetlocTuple = Tuple[str, Tuple[Optional[str], Optional[str]]]
+
+
+def get_pip_version() -> str:
+    pip_pkg_dir = os.path.join(os.path.dirname(__file__), "..", "..")
+    pip_pkg_dir = os.path.abspath(pip_pkg_dir)
+
+    return "pip {} from {} (python {})".format(
+        __version__,
+        pip_pkg_dir,
+        get_major_minor_version(),
+    )
+
+
+def normalize_version_info(py_version_info: Tuple[int, ...]) -> Tuple[int, int, int]:
+    """
+    Convert a tuple of ints representing a Python version to one of length
+    three.
+
+    :param py_version_info: a tuple of ints representing a Python version,
+        or None to specify no version. The tuple can have any length.
+
+    :return: a tuple of length three if `py_version_info` is non-None.
+        Otherwise, return `py_version_info` unchanged (i.e. None).
+    """
+    if len(py_version_info) < 3:
+        py_version_info += (3 - len(py_version_info)) * (0,)
+    elif len(py_version_info) > 3:
+        py_version_info = py_version_info[:3]
+
+    return cast("VersionInfo", py_version_info)
+
+
+def ensure_dir(path: str) -> None:
+    """os.path.makedirs without EEXIST."""
+    try:
+        os.makedirs(path)
+    except OSError as e:
+        # Windows can raise spurious ENOTEMPTY errors. See #6426.
+        if e.errno != errno.EEXIST and e.errno != errno.ENOTEMPTY:
+            raise
+
+
+def get_prog() -> str:
+    try:
+        prog = os.path.basename(sys.argv[0])
+        if prog in ("__main__.py", "-c"):
+            return f"{sys.executable} -m pip"
+        else:
+            return prog
+    except (AttributeError, TypeError, IndexError):
+        pass
+    return "pip"
+
+
+# Retry every half second for up to 3 seconds
+# Tenacity raises RetryError by default, explicitly raise the original exception
+@retry(reraise=True, stop=stop_after_delay(3), wait=wait_fixed(0.5))
+def rmtree(dir: str, ignore_errors: bool = False) -> None:
+    shutil.rmtree(dir, ignore_errors=ignore_errors, onerror=rmtree_errorhandler)
+
+
+def rmtree_errorhandler(func: Callable[..., Any], path: str, exc_info: ExcInfo) -> None:
+    """On Windows, the files in .svn are read-only, so when rmtree() tries to
+    remove them, an exception is thrown.  We catch that here, remove the
+    read-only attribute, and hopefully continue without problems."""
+    try:
+        has_attr_readonly = not (os.stat(path).st_mode & stat.S_IWRITE)
+    except OSError:
+        # The stat call failing means the path no longer exists; nothing to do.
+        return
+
+    if has_attr_readonly:
+        # convert to read/write
+        os.chmod(path, stat.S_IWRITE)
+        # use the original function to repeat the operation
+        func(path)
+        return
+    else:
+        raise
+
+
+def display_path(path: str) -> str:
+    """Gives the display value for a given path, making it relative to cwd
+    if possible."""
+    path = os.path.normcase(os.path.abspath(path))
+    if path.startswith(os.getcwd() + os.path.sep):
+        path = "." + path[len(os.getcwd()) :]
+    return path
+
+
+def backup_dir(dir: str, ext: str = ".bak") -> str:
+    """Figure out the name of a directory to back up the given dir to
+    (adding .bak, .bak2, etc)"""
+    n = 1
+    extension = ext
+    while os.path.exists(dir + extension):
+        n += 1
+        extension = ext + str(n)
+    return dir + extension
+
+
+def ask_path_exists(message: str, options: Iterable[str]) -> str:
+    for action in os.environ.get("PIP_EXISTS_ACTION", "").split():
+        if action in options:
+            return action
+    return ask(message, options)
+
+
+def _check_no_input(message: str) -> None:
+    """Raise an error if no input is allowed."""
+    if os.environ.get("PIP_NO_INPUT"):
+        raise Exception(
+            f"No input was expected ($PIP_NO_INPUT set); question: {message}"
+        )
+
+
+def ask(message: str, options: Iterable[str]) -> str:
+    """Ask the message interactively, with the given possible responses"""
+    while True:
+        _check_no_input(message)
+        response = input(message)
+        response = response.strip().lower()
+        if response not in options:
+            print(
+                "Your response ({!r}) was not one of the expected responses: "
+                "{}".format(response, ", ".join(options))
+            )
+        else:
+            return response
+
+
+def ask_input(message: str) -> str:
+    """Ask for input interactively."""
+    _check_no_input(message)
+    return input(message)
+
+
+def ask_password(message: str) -> str:
+    """Ask for a password interactively."""
+    _check_no_input(message)
+    return getpass.getpass(message)
+
+
+def strtobool(val: str) -> int:
+    """Convert a string representation of truth to true (1) or false (0).
+
+    True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values
+    are 'n', 'no', 'f', 'false', 'off', and '0'.  Raises ValueError if
+    'val' is anything else.
+    """
+    val = val.lower()
+    if val in ("y", "yes", "t", "true", "on", "1"):
+        return 1
+    elif val in ("n", "no", "f", "false", "off", "0"):
+        return 0
+    else:
+        raise ValueError(f"invalid truth value {val!r}")
+
+
+def format_size(bytes: float) -> str:
+    if bytes > 1000 * 1000:
+        return "{:.1f} MB".format(bytes / 1000.0 / 1000)
+    elif bytes > 10 * 1000:
+        return "{} kB".format(int(bytes / 1000))
+    elif bytes > 1000:
+        return "{:.1f} kB".format(bytes / 1000.0)
+    else:
+        return "{} bytes".format(int(bytes))
+
+
+def tabulate(rows: Iterable[Iterable[Any]]) -> Tuple[List[str], List[int]]:
+    """Return a list of formatted rows and a list of column sizes.
+
+    For example::
+
+        >>> tabulate([['foobar', 2000], [0xdeadbeef]])
+        (['foobar     2000', '3735928559'], [10, 4])
+    """
+    rows = [tuple(map(str, row)) for row in rows]
+    sizes = [max(map(len, col)) for col in zip_longest(*rows, fillvalue="")]
+    table = [" ".join(map(str.ljust, row, sizes)).rstrip() for row in rows]
+    return table, sizes
+
+
+def is_installable_dir(path: str) -> bool:
+    """Is path is a directory containing pyproject.toml or setup.py?
+
+    If pyproject.toml exists, this is a PEP 517 project. Otherwise we look for
+    a legacy setuptools layout by identifying setup.py. We don't check for
+    setup.cfg because using it without setup.py is only available for PEP 517
+    projects, which are already covered by the pyproject.toml check.
+    """
+    if not os.path.isdir(path):
+        return False
+    if os.path.isfile(os.path.join(path, "pyproject.toml")):
+        return True
+    if os.path.isfile(os.path.join(path, "setup.py")):
+        return True
+    return False
+
+
+def read_chunks(
+    file: BinaryIO, size: int = io.DEFAULT_BUFFER_SIZE
+) -> Generator[bytes, None, None]:
+    """Yield pieces of data from a file-like object until EOF."""
+    while True:
+        chunk = file.read(size)
+        if not chunk:
+            break
+        yield chunk
+
+
+def normalize_path(path: str, resolve_symlinks: bool = True) -> str:
+    """
+    Convert a path to its canonical, case-normalized, absolute version.
+
+    """
+    path = os.path.expanduser(path)
+    if resolve_symlinks:
+        path = os.path.realpath(path)
+    else:
+        path = os.path.abspath(path)
+    return os.path.normcase(path)
+
+
+def splitext(path: str) -> Tuple[str, str]:
+    """Like os.path.splitext, but take off .tar too"""
+    base, ext = posixpath.splitext(path)
+    if base.lower().endswith(".tar"):
+        ext = base[-4:] + ext
+        base = base[:-4]
+    return base, ext
+
+
+def renames(old: str, new: str) -> None:
+    """Like os.renames(), but handles renaming across devices."""
+    # Implementation borrowed from os.renames().
+    head, tail = os.path.split(new)
+    if head and tail and not os.path.exists(head):
+        os.makedirs(head)
+
+    shutil.move(old, new)
+
+    head, tail = os.path.split(old)
+    if head and tail:
+        try:
+            os.removedirs(head)
+        except OSError:
+            pass
+
+
+def is_local(path: str) -> bool:
+    """
+    Return True if path is within sys.prefix, if we're running in a virtualenv.
+
+    If we're not in a virtualenv, all paths are considered "local."
+
+    Caution: this function assumes the head of path has been normalized
+    with normalize_path.
+    """
+    if not running_under_virtualenv():
+        return True
+    return path.startswith(normalize_path(sys.prefix))
+
+
+def write_output(msg: Any, *args: Any) -> None:
+    logger.info(msg, *args)
+
+
+class StreamWrapper(StringIO):
+    orig_stream: TextIO = None
+
+    @classmethod
+    def from_stream(cls, orig_stream: TextIO) -> "StreamWrapper":
+        cls.orig_stream = orig_stream
+        return cls()
+
+    # compileall.compile_dir() needs stdout.encoding to print to stdout
+    # https://github.com/python/mypy/issues/4125
+    @property
+    def encoding(self):  # type: ignore
+        return self.orig_stream.encoding
+
+
+@contextlib.contextmanager
+def captured_output(stream_name: str) -> Generator[StreamWrapper, None, None]:
+    """Return a context manager used by captured_stdout/stdin/stderr
+    that temporarily replaces the sys stream *stream_name* with a StringIO.
+
+    Taken from Lib/test/support/__init__.py in the CPython repo.
+    """
+    orig_stdout = getattr(sys, stream_name)
+    setattr(sys, stream_name, StreamWrapper.from_stream(orig_stdout))
+    try:
+        yield getattr(sys, stream_name)
+    finally:
+        setattr(sys, stream_name, orig_stdout)
+
+
+def captured_stdout() -> ContextManager[StreamWrapper]:
+    """Capture the output of sys.stdout:
+
+       with captured_stdout() as stdout:
+           print('hello')
+       self.assertEqual(stdout.getvalue(), 'hello\n')
+
+    Taken from Lib/test/support/__init__.py in the CPython repo.
+    """
+    return captured_output("stdout")
+
+
+def captured_stderr() -> ContextManager[StreamWrapper]:
+    """
+    See captured_stdout().
+    """
+    return captured_output("stderr")
+
+
+# Simulates an enum
+def enum(*sequential: Any, **named: Any) -> Type[Any]:
+    enums = dict(zip(sequential, range(len(sequential))), **named)
+    reverse = {value: key for key, value in enums.items()}
+    enums["reverse_mapping"] = reverse
+    return type("Enum", (), enums)
+
+
+def build_netloc(host: str, port: Optional[int]) -> str:
+    """
+    Build a netloc from a host-port pair
+    """
+    if port is None:
+        return host
+    if ":" in host:
+        # Only wrap host with square brackets when it is IPv6
+        host = f"[{host}]"
+    return f"{host}:{port}"
+
+
+def build_url_from_netloc(netloc: str, scheme: str = "https") -> str:
+    """
+    Build a full URL from a netloc.
+    """
+    if netloc.count(":") >= 2 and "@" not in netloc and "[" not in netloc:
+        # It must be a bare IPv6 address, so wrap it with brackets.
+        netloc = f"[{netloc}]"
+    return f"{scheme}://{netloc}"
+
+
+def parse_netloc(netloc: str) -> Tuple[str, Optional[int]]:
+    """
+    Return the host-port pair from a netloc.
+    """
+    url = build_url_from_netloc(netloc)
+    parsed = urllib.parse.urlparse(url)
+    return parsed.hostname, parsed.port
+
+
+def split_auth_from_netloc(netloc: str) -> NetlocTuple:
+    """
+    Parse out and remove the auth information from a netloc.
+
+    Returns: (netloc, (username, password)).
+    """
+    if "@" not in netloc:
+        return netloc, (None, None)
+
+    # Split from the right because that's how urllib.parse.urlsplit()
+    # behaves if more than one @ is present (which can be checked using
+    # the password attribute of urlsplit()'s return value).
+    auth, netloc = netloc.rsplit("@", 1)
+    pw: Optional[str] = None
+    if ":" in auth:
+        # Split from the left because that's how urllib.parse.urlsplit()
+        # behaves if more than one : is present (which again can be checked
+        # using the password attribute of the return value)
+        user, pw = auth.split(":", 1)
+    else:
+        user, pw = auth, None
+
+    user = urllib.parse.unquote(user)
+    if pw is not None:
+        pw = urllib.parse.unquote(pw)
+
+    return netloc, (user, pw)
+
+
+def redact_netloc(netloc: str) -> str:
+    """
+    Replace the sensitive data in a netloc with "****", if it exists.
+
+    For example:
+        - "user:pass@example.com" returns "user:****@example.com"
+        - "accesstoken@example.com" returns "****@example.com"
+    """
+    netloc, (user, password) = split_auth_from_netloc(netloc)
+    if user is None:
+        return netloc
+    if password is None:
+        user = "****"
+        password = ""
+    else:
+        user = urllib.parse.quote(user)
+        password = ":****"
+    return "{user}{password}@{netloc}".format(
+        user=user, password=password, netloc=netloc
+    )
+
+
+def _transform_url(
+    url: str, transform_netloc: Callable[[str], Tuple[Any, ...]]
+) -> Tuple[str, NetlocTuple]:
+    """Transform and replace netloc in a url.
+
+    transform_netloc is a function taking the netloc and returning a
+    tuple. The first element of this tuple is the new netloc. The
+    entire tuple is returned.
+
+    Returns a tuple containing the transformed url as item 0 and the
+    original tuple returned by transform_netloc as item 1.
+    """
+    purl = urllib.parse.urlsplit(url)
+    netloc_tuple = transform_netloc(purl.netloc)
+    # stripped url
+    url_pieces = (purl.scheme, netloc_tuple[0], purl.path, purl.query, purl.fragment)
+    surl = urllib.parse.urlunsplit(url_pieces)
+    return surl, cast("NetlocTuple", netloc_tuple)
+
+
+def _get_netloc(netloc: str) -> NetlocTuple:
+    return split_auth_from_netloc(netloc)
+
+
+def _redact_netloc(netloc: str) -> Tuple[str]:
+    return (redact_netloc(netloc),)
+
+
+def split_auth_netloc_from_url(url: str) -> Tuple[str, str, Tuple[str, str]]:
+    """
+    Parse a url into separate netloc, auth, and url with no auth.
+
+    Returns: (url_without_auth, netloc, (username, password))
+    """
+    url_without_auth, (netloc, auth) = _transform_url(url, _get_netloc)
+    return url_without_auth, netloc, auth
+
+
+def remove_auth_from_url(url: str) -> str:
+    """Return a copy of url with 'username:password@' removed."""
+    # username/pass params are passed to subversion through flags
+    # and are not recognized in the url.
+    return _transform_url(url, _get_netloc)[0]
+
+
+def redact_auth_from_url(url: str) -> str:
+    """Replace the password in a given url with ****."""
+    return _transform_url(url, _redact_netloc)[0]
+
+
+class HiddenText:
+    def __init__(self, secret: str, redacted: str) -> None:
+        self.secret = secret
+        self.redacted = redacted
+
+    def __repr__(self) -> str:
+        return "<HiddenText {!r}>".format(str(self))
+
+    def __str__(self) -> str:
+        return self.redacted
+
+    # This is useful for testing.
+    def __eq__(self, other: Any) -> bool:
+        if type(self) != type(other):
+            return False
+
+        # The string being used for redaction doesn't also have to match,
+        # just the raw, original string.
+        return self.secret == other.secret
+
+
+def hide_value(value: str) -> HiddenText:
+    return HiddenText(value, redacted="****")
+
+
+def hide_url(url: str) -> HiddenText:
+    redacted = redact_auth_from_url(url)
+    return HiddenText(url, redacted=redacted)
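+
+
+# Illustrative: hide_url() keeps the secret retrievable while logging safely.
+# str(hide_url("https://user:s3cret@example.com/repo.git")) renders as
+# "https://user:****@example.com/repo.git", while .secret still holds the
+# original URL for the command that needs it.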
+
+
+def protect_pip_from_modification_on_windows(modifying_pip: bool) -> None:
+    """Protection of pip.exe from modification on Windows
+
+    On Windows, any operation modifying pip should be run as:
+        python -m pip ...
+    """
+    pip_names = [
+        "pip",
+        f"pip{sys.version_info.major}",
+        f"pip{sys.version_info.major}.{sys.version_info.minor}",
+    ]
+
+    # See https://github.com/pypa/pip/issues/1299 for more discussion
+    should_show_use_python_msg = (
+        modifying_pip and WINDOWS and os.path.basename(sys.argv[0]) in pip_names
+    )
+
+    if should_show_use_python_msg:
+        new_command = [sys.executable, "-m", "pip"] + sys.argv[1:]
+        raise CommandError(
+            "To modify pip, please run the following command:\n{}".format(
+                " ".join(new_command)
+            )
+        )
+
+
+def is_console_interactive() -> bool:
+    """Is this console interactive?"""
+    return sys.stdin is not None and sys.stdin.isatty()
+
+
+def hash_file(path: str, blocksize: int = 1 << 20) -> Tuple[Any, int]:
+    """Return (hash, length) for path using hashlib.sha256()"""
+
+    h = hashlib.sha256()
+    length = 0
+    with open(path, "rb") as f:
+        for block in read_chunks(f, size=blocksize):
+            length += len(block)
+            h.update(block)
+    return h, length
+
+
+def is_wheel_installed() -> bool:
+    """
+    Return whether the wheel package is installed.
+    """
+    try:
+        import wheel  # noqa: F401
+    except ImportError:
+        return False
+
+    return True
+
+
+def pairwise(iterable: Iterable[Any]) -> Iterator[Tuple[Any, Any]]:
+    """
+    Return paired elements.
+
+    For example:
+        s -> (s0, s1), (s2, s3), (s4, s5), ...
+    """
+    iterable = iter(iterable)
+    return zip_longest(iterable, iterable)
+
+
+def partition(
+    pred: Callable[[T], bool],
+    iterable: Iterable[T],
+) -> Tuple[Iterable[T], Iterable[T]]:
+    """
+    Use a predicate to partition entries into false entries and true entries,
+    like
+
+        partition(is_odd, range(10)) --> 0 2 4 6 8   and  1 3 5 7 9
+    """
+    t1, t2 = tee(iterable)
+    return filterfalse(pred, t1), filter(pred, t2)
+
+
+class ConfiguredPep517HookCaller(Pep517HookCaller):
+    def __init__(
+        self,
+        config_holder: Any,
+        source_dir: str,
+        build_backend: str,
+        backend_path: Optional[str] = None,
+        runner: Optional[Callable[..., None]] = None,
+        python_executable: Optional[str] = None,
+    ):
+        super().__init__(
+            source_dir, build_backend, backend_path, runner, python_executable
+        )
+        self.config_holder = config_holder
+
+    def build_wheel(
+        self,
+        wheel_directory: str,
+        config_settings: Optional[Dict[str, str]] = None,
+        metadata_directory: Optional[str] = None,
+    ) -> str:
+        cs = self.config_holder.config_settings
+        return super().build_wheel(
+            wheel_directory, config_settings=cs, metadata_directory=metadata_directory
+        )
+
+    def build_sdist(
+        self, sdist_directory: str, config_settings: Optional[Dict[str, str]] = None
+    ) -> str:
+        cs = self.config_holder.config_settings
+        return super().build_sdist(sdist_directory, config_settings=cs)
+
+    def build_editable(
+        self,
+        wheel_directory: str,
+        config_settings: Optional[Dict[str, str]] = None,
+        metadata_directory: Optional[str] = None,
+    ) -> str:
+        cs = self.config_holder.config_settings
+        return super().build_editable(
+            wheel_directory, config_settings=cs, metadata_directory=metadata_directory
+        )
+
+    def get_requires_for_build_wheel(
+        self, config_settings: Optional[Dict[str, str]] = None
+    ) -> List[str]:
+        cs = self.config_holder.config_settings
+        return super().get_requires_for_build_wheel(config_settings=cs)
+
+    def get_requires_for_build_sdist(
+        self, config_settings: Optional[Dict[str, str]] = None
+    ) -> List[str]:
+        cs = self.config_holder.config_settings
+        return super().get_requires_for_build_sdist(config_settings=cs)
+
+    def get_requires_for_build_editable(
+        self, config_settings: Optional[Dict[str, str]] = None
+    ) -> List[str]:
+        cs = self.config_holder.config_settings
+        return super().get_requires_for_build_editable(config_settings=cs)
+
+    def prepare_metadata_for_build_wheel(
+        self,
+        metadata_directory: str,
+        config_settings: Optional[Dict[str, str]] = None,
+        _allow_fallback: bool = True,
+    ) -> str:
+        cs = self.config_holder.config_settings
+        return super().prepare_metadata_for_build_wheel(
+            metadata_directory=metadata_directory,
+            config_settings=cs,
+            _allow_fallback=_allow_fallback,
+        )
+
+    def prepare_metadata_for_build_editable(
+        self,
+        metadata_directory: str,
+        config_settings: Optional[Dict[str, str]] = None,
+        _allow_fallback: bool = True,
+    ) -> str:
+        cs = self.config_holder.config_settings
+        return super().prepare_metadata_for_build_editable(
+            metadata_directory=metadata_directory,
+            config_settings=cs,
+            _allow_fallback=_allow_fallback,
+        )
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/models.py b/venv/lib/python3.9/site-packages/pip/_internal/utils/models.py
new file mode 100644
index 0000000..b6bb21a
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/models.py
@@ -0,0 +1,39 @@
+"""Utilities for defining models
+"""
+
+import operator
+from typing import Any, Callable, Type
+
+
+class KeyBasedCompareMixin:
+    """Provides comparison capabilities that is based on a key"""
+
+    __slots__ = ["_compare_key", "_defining_class"]
+
+    def __init__(self, key: Any, defining_class: Type["KeyBasedCompareMixin"]) -> None:
+        self._compare_key = key
+        self._defining_class = defining_class
+
+    def __hash__(self) -> int:
+        return hash(self._compare_key)
+
+    def __lt__(self, other: Any) -> bool:
+        return self._compare(other, operator.__lt__)
+
+    def __le__(self, other: Any) -> bool:
+        return self._compare(other, operator.__le__)
+
+    def __gt__(self, other: Any) -> bool:
+        return self._compare(other, operator.__gt__)
+
+    def __ge__(self, other: Any) -> bool:
+        return self._compare(other, operator.__ge__)
+
+    def __eq__(self, other: Any) -> bool:
+        return self._compare(other, operator.__eq__)
+
+    def _compare(self, other: Any, method: Callable[[Any, Any], bool]) -> bool:
+        if not isinstance(other, self._defining_class):
+            return NotImplemented
+
+        return method(self._compare_key, other._compare_key)
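+
+
+# Minimal usage sketch (the Candidate class is hypothetical):
+if __name__ == "__main__":
+
+    class Candidate(KeyBasedCompareMixin):
+        def __init__(self, version: int) -> None:
+            super().__init__(key=version, defining_class=Candidate)
+
+    assert Candidate(1) < Candidate(2)
+    assert Candidate(3) == Candidate(3)
+    assert Candidate(3) != "3"  # comparisons against other types fall through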
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/packaging.py b/venv/lib/python3.9/site-packages/pip/_internal/utils/packaging.py
new file mode 100644
index 0000000..b9f6af4
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/packaging.py
@@ -0,0 +1,57 @@
+import functools
+import logging
+import re
+from typing import NewType, Optional, Tuple, cast
+
+from pip._vendor.packaging import specifiers, version
+from pip._vendor.packaging.requirements import Requirement
+
+NormalizedExtra = NewType("NormalizedExtra", str)
+
+logger = logging.getLogger(__name__)
+
+
+def check_requires_python(
+    requires_python: Optional[str], version_info: Tuple[int, ...]
+) -> bool:
+    """
+    Check if the given Python version matches a "Requires-Python" specifier.
+
+    :param version_info: A 3-tuple of ints representing a Python
+        major-minor-micro version to check (e.g. `sys.version_info[:3]`).
+
+    :return: `True` if the given Python version satisfies the requirement.
+        Otherwise, return `False`.
+
+    :raises InvalidSpecifier: If `requires_python` has an invalid format.
+    """
+    if requires_python is None:
+        # The package provides no information
+        return True
+    requires_python_specifier = specifiers.SpecifierSet(requires_python)
+
+    python_version = version.parse(".".join(map(str, version_info)))
+    return python_version in requires_python_specifier
+
+
+@functools.lru_cache(maxsize=512)
+def get_requirement(req_string: str) -> Requirement:
+    """Construct a packaging.Requirement object with caching"""
+    # Parsing requirement strings is expensive, and is also expected to happen
+    # with a low diversity of different arguments (at least relative to the number
+    # constructed). This method adds a cache to requirement object creation to
+    # minimize repeated parsing of the same string to construct equivalent
+    # Requirement objects.
+    return Requirement(req_string)
+
+
+def safe_extra(extra: str) -> NormalizedExtra:
+    """Convert an arbitrary string to a standard 'extra' name
+
+    Any runs of non-alphanumeric characters are replaced with a single '_',
+    and the result is always lowercased.
+
+    This function is duplicated from ``pkg_resources``. Note that this is not
+    the same as either ``canonicalize_name`` or ``_egg_link_name``.
+    """
+    return cast(NormalizedExtra, re.sub("[^A-Za-z0-9.-]+", "_", extra).lower())
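A minimal usage sketch, assuming the module above is importable; the requirement string is illustrative:

import sys

from pip._internal.utils.packaging import (
    check_requires_python,
    get_requirement,
    safe_extra,
)

# Does the running interpreter satisfy a Requires-Python specifier?
print(check_requires_python(">=3.7", sys.version_info[:3]))  # True on 3.7+
print(check_requires_python(None, sys.version_info[:3]))     # True: no constraint

# Requirement parsing is cached, so repeated parses of the same string are cheap.
req = get_requirement("requests>=2.0; python_version >= '3.7'")
print(req.name, str(req.specifier))  # requests >=2.0

print(safe_extra("Some Extra!"))  # some_extra_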
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/setuptools_build.py b/venv/lib/python3.9/site-packages/pip/_internal/utils/setuptools_build.py
new file mode 100644
index 0000000..01ef4a4
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/setuptools_build.py
@@ -0,0 +1,195 @@
+import sys
+import textwrap
+from typing import List, Optional, Sequence
+
+# Shim to wrap setup.py invocation with setuptools
+# Note that __file__ is handled via two {!r} *and* %r, to ensure that paths on
+# Windows are correctly handled (it should be "C:\\Users" not "C:\Users").
+_SETUPTOOLS_SHIM = textwrap.dedent(
+    """
+    exec(compile('''
+    # This is <pip-setuptools-caller> -- a caller that pip uses to run setup.py
+    #
+    # - It imports setuptools before invoking setup.py, to enable projects that directly
+    #   import from `distutils.core` to work with newer packaging standards.
+    # - It provides a clear error message when setuptools is not installed.
+    # - It sets `sys.argv[0]` to the underlying `setup.py`, when invoking `setup.py` so
+    #   setuptools doesn't think the script is `-c`. This avoids the following warning:
+    #     manifest_maker: standard file '-c' not found.
+    # - It generates a shim setup.py, for handling setup.cfg-only projects.
+    import os, sys, tokenize
+
+    try:
+        import setuptools
+    except ImportError as error:
+        print(
+            "ERROR: Can not execute `setup.py` since setuptools is not available in "
+            "the build environment.",
+            file=sys.stderr,
+        )
+        sys.exit(1)
+
+    __file__ = %r
+    sys.argv[0] = __file__
+
+    if os.path.exists(__file__):
+        filename = __file__
+        with tokenize.open(__file__) as f:
+            setup_py_code = f.read()
+    else:
+        filename = "<auto-generated setuptools caller>"
+        setup_py_code = "from setuptools import setup; setup()"
+
+    exec(compile(setup_py_code, filename, "exec"))
+    ''' % ({!r},), "<pip-setuptools-caller>", "exec"))
+    """
+).rstrip()
+
+
+def make_setuptools_shim_args(
+    setup_py_path: str,
+    global_options: Optional[Sequence[str]] = None,
+    no_user_config: bool = False,
+    unbuffered_output: bool = False,
+) -> List[str]:
+    """
+    Get setuptools command arguments with a shim-wrapped setup.py invocation.
+
+    :param setup_py_path: The path to setup.py to be wrapped.
+    :param global_options: Additional global options.
+    :param no_user_config: If True, disables personal user configuration.
+    :param unbuffered_output: If True, adds the unbuffered switch to the
+     argument list.
+    """
+    args = [sys.executable]
+    if unbuffered_output:
+        args += ["-u"]
+    args += ["-c", _SETUPTOOLS_SHIM.format(setup_py_path)]
+    if global_options:
+        args += global_options
+    if no_user_config:
+        args += ["--no-user-cfg"]
+    return args
+
+
+def make_setuptools_bdist_wheel_args(
+    setup_py_path: str,
+    global_options: Sequence[str],
+    build_options: Sequence[str],
+    destination_dir: str,
+) -> List[str]:
+    # NOTE: Eventually, we'd also want to pass -S to the flags here, when we're
+    # isolating. Currently, it breaks Python in virtualenvs, because it
+    # relies on site.py to find parts of the standard library outside the
+    # virtualenv.
+    args = make_setuptools_shim_args(
+        setup_py_path, global_options=global_options, unbuffered_output=True
+    )
+    args += ["bdist_wheel", "-d", destination_dir]
+    args += build_options
+    return args
+
+
+def make_setuptools_clean_args(
+    setup_py_path: str,
+    global_options: Sequence[str],
+) -> List[str]:
+    args = make_setuptools_shim_args(
+        setup_py_path, global_options=global_options, unbuffered_output=True
+    )
+    args += ["clean", "--all"]
+    return args
+
+
+def make_setuptools_develop_args(
+    setup_py_path: str,
+    global_options: Sequence[str],
+    install_options: Sequence[str],
+    no_user_config: bool,
+    prefix: Optional[str],
+    home: Optional[str],
+    use_user_site: bool,
+) -> List[str]:
+    assert not (use_user_site and prefix)
+
+    args = make_setuptools_shim_args(
+        setup_py_path,
+        global_options=global_options,
+        no_user_config=no_user_config,
+    )
+
+    args += ["develop", "--no-deps"]
+
+    args += install_options
+
+    if prefix:
+        args += ["--prefix", prefix]
+    if home is not None:
+        args += ["--install-dir", home]
+
+    if use_user_site:
+        args += ["--user", "--prefix="]
+
+    return args
+
+
+def make_setuptools_egg_info_args(
+    setup_py_path: str,
+    egg_info_dir: Optional[str],
+    no_user_config: bool,
+) -> List[str]:
+    args = make_setuptools_shim_args(setup_py_path, no_user_config=no_user_config)
+
+    args += ["egg_info"]
+
+    if egg_info_dir:
+        args += ["--egg-base", egg_info_dir]
+
+    return args
+
+
+def make_setuptools_install_args(
+    setup_py_path: str,
+    global_options: Sequence[str],
+    install_options: Sequence[str],
+    record_filename: str,
+    root: Optional[str],
+    prefix: Optional[str],
+    header_dir: Optional[str],
+    home: Optional[str],
+    use_user_site: bool,
+    no_user_config: bool,
+    pycompile: bool,
+) -> List[str]:
+    assert not (use_user_site and prefix)
+    assert not (use_user_site and root)
+
+    args = make_setuptools_shim_args(
+        setup_py_path,
+        global_options=global_options,
+        no_user_config=no_user_config,
+        unbuffered_output=True,
+    )
+    args += ["install", "--record", record_filename]
+    args += ["--single-version-externally-managed"]
+
+    if root is not None:
+        args += ["--root", root]
+    if prefix is not None:
+        args += ["--prefix", prefix]
+    if home is not None:
+        args += ["--home", home]
+    if use_user_site:
+        args += ["--user", "--prefix="]
+
+    if pycompile:
+        args += ["--compile"]
+    else:
+        args += ["--no-compile"]
+
+    if header_dir:
+        args += ["--install-headers", header_dir]
+
+    args += install_options
+
+    return args
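To illustrate how these helpers compose, a sketch under the assumption that the module above is importable; the setup.py path is made up:

import sys

from pip._internal.utils.setuptools_build import (
    make_setuptools_bdist_wheel_args,
    make_setuptools_shim_args,
)

args = make_setuptools_shim_args(
    "/tmp/demo/setup.py", no_user_config=True, unbuffered_output=True
)
# -> [sys.executable, "-u", "-c", <shim source>, "--no-user-cfg"]
assert args[0] == sys.executable and args[1] == "-u"

wheel_args = make_setuptools_bdist_wheel_args(
    "/tmp/demo/setup.py",
    global_options=[],
    build_options=[],
    destination_dir="/tmp/demo/dist",
)
assert wheel_args[-3:] == ["bdist_wheel", "-d", "/tmp/demo/dist"]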
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/subprocess.py b/venv/lib/python3.9/site-packages/pip/_internal/utils/subprocess.py
new file mode 100644
index 0000000..cf5bf6b
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/subprocess.py
@@ -0,0 +1,260 @@
+import logging
+import os
+import shlex
+import subprocess
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    Iterable,
+    List,
+    Mapping,
+    Optional,
+    Union,
+)
+
+from pip._vendor.rich.markup import escape
+
+from pip._internal.cli.spinners import SpinnerInterface, open_spinner
+from pip._internal.exceptions import InstallationSubprocessError
+from pip._internal.utils.logging import VERBOSE, subprocess_logger
+from pip._internal.utils.misc import HiddenText
+
+if TYPE_CHECKING:
+    # Literal was introduced in Python 3.8.
+    #
+    # TODO: Remove `if TYPE_CHECKING` when dropping support for Python 3.7.
+    from typing import Literal
+
+CommandArgs = List[Union[str, HiddenText]]
+
+
+def make_command(*args: Union[str, HiddenText, CommandArgs]) -> CommandArgs:
+    """
+    Create a CommandArgs object.
+    """
+    command_args: CommandArgs = []
+    for arg in args:
+        # Check for list instead of CommandArgs since CommandArgs is
+        # only known during type-checking.
+        if isinstance(arg, list):
+            command_args.extend(arg)
+        else:
+            # Otherwise, arg is str or HiddenText.
+            command_args.append(arg)
+
+    return command_args
+
+
+def format_command_args(args: Union[List[str], CommandArgs]) -> str:
+    """
+    Format command arguments for display.
+    """
+    # For HiddenText arguments, display the redacted form by calling str().
+    # Also, we don't apply str() to arguments that aren't HiddenText since
+    # this can trigger a UnicodeDecodeError in Python 2 if the argument
+    # has type unicode and includes a non-ascii character.  (The type
+    # checker doesn't ensure the annotations are correct in all cases.)
+    return " ".join(
+        shlex.quote(str(arg)) if isinstance(arg, HiddenText) else shlex.quote(arg)
+        for arg in args
+    )
+
+
+def reveal_command_args(args: Union[List[str], CommandArgs]) -> List[str]:
+    """
+    Return the arguments in their raw, unredacted form.
+    """
+    return [arg.secret if isinstance(arg, HiddenText) else arg for arg in args]
+
+
+def call_subprocess(
+    cmd: Union[List[str], CommandArgs],
+    show_stdout: bool = False,
+    cwd: Optional[str] = None,
+    on_returncode: 'Literal["raise", "warn", "ignore"]' = "raise",
+    extra_ok_returncodes: Optional[Iterable[int]] = None,
+    extra_environ: Optional[Mapping[str, Any]] = None,
+    unset_environ: Optional[Iterable[str]] = None,
+    spinner: Optional[SpinnerInterface] = None,
+    log_failed_cmd: Optional[bool] = True,
+    stdout_only: Optional[bool] = False,
+    *,
+    command_desc: str,
+) -> str:
+    """
+    Args:
+      show_stdout: if true, use INFO to log the subprocess's stderr and
+        stdout streams.  Otherwise, use DEBUG.  Defaults to False.
+      extra_ok_returncodes: an iterable of integer return codes that are
+        acceptable, in addition to 0. Defaults to None, which means [].
+      unset_environ: an iterable of environment variable names to unset
+        prior to calling subprocess.Popen().
+      log_failed_cmd: if false, failed commands are not logged, only raised.
+      stdout_only: if true, return only stdout, else return both. When true,
+        logging of both stdout and stderr occurs when the subprocess has
+        terminated, else logging occurs as subprocess output is produced.
+    """
+    if extra_ok_returncodes is None:
+        extra_ok_returncodes = []
+    if unset_environ is None:
+        unset_environ = []
+    # Most places in pip use show_stdout=False. What this means is--
+    #
+    # - We connect the child's output (combined stderr and stdout) to a
+    #   single pipe, which we read.
+    # - We log this output to stderr at DEBUG level as it is received.
+    # - If DEBUG logging isn't enabled (e.g. if --verbose logging wasn't
+    #   requested), then we show a spinner so the user can still see the
+    #   subprocess is in progress.
+    # - If the subprocess exits with an error, we log the output to stderr
+    #   at ERROR level if it hasn't already been displayed to the console
+    #   (e.g. if --verbose logging wasn't enabled).  This way we don't log
+    #   the output to the console twice.
+    #
+    # If show_stdout=True, then the above is still done, but with DEBUG
+    # replaced by INFO.
+    if show_stdout:
+        # Then log the subprocess output at INFO level.
+        log_subprocess: Callable[..., None] = subprocess_logger.info
+        used_level = logging.INFO
+    else:
+        # Then log the subprocess output using VERBOSE.  This also ensures
+        # it will be logged to the log file (aka user_log), if enabled.
+        log_subprocess = subprocess_logger.verbose
+        used_level = VERBOSE
+
+    # Whether the subprocess will be visible in the console.
+    showing_subprocess = subprocess_logger.getEffectiveLevel() <= used_level
+
+    # Only use the spinner if we're not showing the subprocess output
+    # and we have a spinner.
+    use_spinner = not showing_subprocess and spinner is not None
+
+    log_subprocess("Running command %s", command_desc)
+    env = os.environ.copy()
+    if extra_environ:
+        env.update(extra_environ)
+    for name in unset_environ:
+        env.pop(name, None)
+    try:
+        proc = subprocess.Popen(
+            # Convert HiddenText objects to the underlying str.
+            reveal_command_args(cmd),
+            stdin=subprocess.PIPE,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.STDOUT if not stdout_only else subprocess.PIPE,
+            cwd=cwd,
+            env=env,
+            errors="backslashreplace",
+        )
+    except Exception as exc:
+        if log_failed_cmd:
+            subprocess_logger.critical(
+                "Error %s while executing command %s",
+                exc,
+                command_desc,
+            )
+        raise
+    all_output = []
+    if not stdout_only:
+        assert proc.stdout
+        assert proc.stdin
+        proc.stdin.close()
+        # In this mode, stdout and stderr are in the same pipe.
+        while True:
+            line: str = proc.stdout.readline()
+            if not line:
+                break
+            line = line.rstrip()
+            all_output.append(line + "\n")
+
+            # Show the line immediately.
+            log_subprocess(line)
+            # Update the spinner.
+            if use_spinner:
+                assert spinner
+                spinner.spin()
+        try:
+            proc.wait()
+        finally:
+            if proc.stdout:
+                proc.stdout.close()
+        output = "".join(all_output)
+    else:
+        # In this mode, stdout and stderr are in different pipes.
+        # We must use communicate() which is the only safe way to read both.
+        out, err = proc.communicate()
+        # log line by line to preserve pip log indenting
+        for out_line in out.splitlines():
+            log_subprocess(out_line)
+        all_output.append(out)
+        for err_line in err.splitlines():
+            log_subprocess(err_line)
+        all_output.append(err)
+        output = out
+
+    proc_had_error = proc.returncode and proc.returncode not in extra_ok_returncodes
+    if use_spinner:
+        assert spinner
+        if proc_had_error:
+            spinner.finish("error")
+        else:
+            spinner.finish("done")
+    if proc_had_error:
+        if on_returncode == "raise":
+            error = InstallationSubprocessError(
+                command_description=command_desc,
+                exit_code=proc.returncode,
+                output_lines=all_output if not showing_subprocess else None,
+            )
+            if log_failed_cmd:
+                subprocess_logger.error("[present-rich] %s", error)
+                subprocess_logger.verbose(
+                    "[bold magenta]full command[/]: [blue]%s[/]",
+                    escape(format_command_args(cmd)),
+                    extra={"markup": True},
+                )
+                subprocess_logger.verbose(
+                    "[bold magenta]cwd[/]: %s",
+                    escape(cwd or "[inherit]"),
+                    extra={"markup": True},
+                )
+
+            raise error
+        elif on_returncode == "warn":
+            subprocess_logger.warning(
+                'Command "%s" had error code %s in %s',
+                command_desc,
+                proc.returncode,
+                cwd,
+            )
+        elif on_returncode == "ignore":
+            pass
+        else:
+            raise ValueError(f"Invalid value: on_returncode={on_returncode!r}")
+    return output
+
+
+def runner_with_spinner_message(message: str) -> Callable[..., None]:
+    """Provide a subprocess_runner that shows a spinner message.
+
+    Intended for use with pep517's Pep517HookCaller. Thus, the runner has
+    an API that matches what's expected by Pep517HookCaller.subprocess_runner.
+    """
+
+    def runner(
+        cmd: List[str],
+        cwd: Optional[str] = None,
+        extra_environ: Optional[Mapping[str, Any]] = None,
+    ) -> None:
+        with open_spinner(message) as spinner:
+            call_subprocess(
+                cmd,
+                command_desc=message,
+                cwd=cwd,
+                extra_environ=extra_environ,
+                spinner=spinner,
+            )
+
+    return runner
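A usage sketch, assuming the modules above are importable and a POSIX environment; the token value is made up:

from pip._internal.utils.misc import HiddenText
from pip._internal.utils.subprocess import (
    call_subprocess,
    format_command_args,
    make_command,
    reveal_command_args,
)

token = HiddenText("s3cr3t", redacted="****")
cmd = make_command("git", "clone", ["--depth", "1"], token)

print(format_command_args(cmd))   # redacted for display: git clone --depth 1 '****'
print(reveal_command_args(cmd))   # raw form handed to Popen: [..., 's3cr3t']

# Runs the command and returns its captured output (POSIX `echo` assumed):
out = call_subprocess(["echo", "hello"], command_desc="echo hello")
assert out == "hello\n"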
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/temp_dir.py b/venv/lib/python3.9/site-packages/pip/_internal/utils/temp_dir.py
new file mode 100644
index 0000000..8ee8a1c
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/temp_dir.py
@@ -0,0 +1,246 @@
+import errno
+import itertools
+import logging
+import os.path
+import tempfile
+from contextlib import ExitStack, contextmanager
+from typing import Any, Dict, Generator, Optional, TypeVar, Union
+
+from pip._internal.utils.misc import enum, rmtree
+
+logger = logging.getLogger(__name__)
+
+_T = TypeVar("_T", bound="TempDirectory")
+
+
+# Kinds of temporary directories. Only needed for ones that are
+# globally-managed.
+tempdir_kinds = enum(
+    BUILD_ENV="build-env",
+    EPHEM_WHEEL_CACHE="ephem-wheel-cache",
+    REQ_BUILD="req-build",
+)
+
+
+_tempdir_manager: Optional[ExitStack] = None
+
+
+@contextmanager
+def global_tempdir_manager() -> Generator[None, None, None]:
+    global _tempdir_manager
+    with ExitStack() as stack:
+        old_tempdir_manager, _tempdir_manager = _tempdir_manager, stack
+        try:
+            yield
+        finally:
+            _tempdir_manager = old_tempdir_manager
+
+
+class TempDirectoryTypeRegistry:
+    """Manages temp directory behavior"""
+
+    def __init__(self) -> None:
+        self._should_delete: Dict[str, bool] = {}
+
+    def set_delete(self, kind: str, value: bool) -> None:
+        """Indicate whether a TempDirectory of the given kind should be
+        auto-deleted.
+        """
+        self._should_delete[kind] = value
+
+    def get_delete(self, kind: str) -> bool:
+        """Get configured auto-delete flag for a given TempDirectory type,
+        default True.
+        """
+        return self._should_delete.get(kind, True)
+
+
+_tempdir_registry: Optional[TempDirectoryTypeRegistry] = None
+
+
+@contextmanager
+def tempdir_registry() -> Generator[TempDirectoryTypeRegistry, None, None]:
+    """Provides a scoped global tempdir registry that can be used to dictate
+    whether directories should be deleted.
+    """
+    global _tempdir_registry
+    old_tempdir_registry = _tempdir_registry
+    _tempdir_registry = TempDirectoryTypeRegistry()
+    try:
+        yield _tempdir_registry
+    finally:
+        _tempdir_registry = old_tempdir_registry
+
+
+class _Default:
+    pass
+
+
+_default = _Default()
+
+
+class TempDirectory:
+    """Helper class that owns and cleans up a temporary directory.
+
+    This class can be used as a context manager or as an OO representation of a
+    temporary directory.
+
+    Attributes:
+        path
+            Location of the created temporary directory
+        delete
+            Whether the directory should be deleted when exiting
+            (when used as a contextmanager)
+
+    Methods:
+        cleanup()
+            Deletes the temporary directory
+
+    When used as a context manager, if the delete attribute is True, on
+    exiting the context the temporary directory is deleted.
+    """
+
+    def __init__(
+        self,
+        path: Optional[str] = None,
+        delete: Union[bool, None, _Default] = _default,
+        kind: str = "temp",
+        globally_managed: bool = False,
+    ):
+        super().__init__()
+
+        if delete is _default:
+            if path is not None:
+                # If we were given an explicit directory, resolve delete option
+                # now.
+                delete = False
+            else:
+                # Otherwise, we wait until cleanup and see what
+                # tempdir_registry says.
+                delete = None
+
+        # The only time we specify path is for editables, where it
+        # is the value of the --src option.
+        if path is None:
+            path = self._create(kind)
+
+        self._path = path
+        self._deleted = False
+        self.delete = delete
+        self.kind = kind
+
+        if globally_managed:
+            assert _tempdir_manager is not None
+            _tempdir_manager.enter_context(self)
+
+    @property
+    def path(self) -> str:
+        assert not self._deleted, f"Attempted to access deleted path: {self._path}"
+        return self._path
+
+    def __repr__(self) -> str:
+        return f"<{self.__class__.__name__} {self.path!r}>"
+
+    def __enter__(self: _T) -> _T:
+        return self
+
+    def __exit__(self, exc: Any, value: Any, tb: Any) -> None:
+        if self.delete is not None:
+            delete = self.delete
+        elif _tempdir_registry:
+            delete = _tempdir_registry.get_delete(self.kind)
+        else:
+            delete = True
+
+        if delete:
+            self.cleanup()
+
+    def _create(self, kind: str) -> str:
+        """Create a temporary directory and store its path in self.path"""
+        # We realpath here because some systems have their default tmpdir
+        # symlinked to another directory.  This tends to confuse build
+        # scripts, so we canonicalize the path by traversing potential
+        # symlinks here.
+        path = os.path.realpath(tempfile.mkdtemp(prefix=f"pip-{kind}-"))
+        logger.debug("Created temporary directory: %s", path)
+        return path
+
+    def cleanup(self) -> None:
+        """Remove the temporary directory created and reset state"""
+        self._deleted = True
+        if not os.path.exists(self._path):
+            return
+        rmtree(self._path)
+
+
+class AdjacentTempDirectory(TempDirectory):
+    """Helper class that creates a temporary directory adjacent to a real one.
+
+    Attributes:
+        original
+            The original directory to create a temp directory for.
+        path
+            After calling create() or entering, contains the full
+            path to the temporary directory.
+        delete
+            Whether the directory should be deleted when exiting
+            (when used as a contextmanager)
+
+    """
+
+    # The characters that may be used to name the temp directory
+    # We always prepend a ~ and then rotate through these until
+    # a usable name is found.
+    # pkg_resources raises a different error for .dist-info folder
+    # with leading '-' and invalid metadata
+    LEADING_CHARS = "-~.=%0123456789"
+
+    def __init__(self, original: str, delete: Optional[bool] = None) -> None:
+        self.original = original.rstrip("/\\")
+        super().__init__(delete=delete)
+
+    @classmethod
+    def _generate_names(cls, name: str) -> Generator[str, None, None]:
+        """Generates a series of temporary names.
+
+        The algorithm replaces the leading characters in the name
+        with ones that are valid filesystem characters, but are not
+        valid package names (for both Python and pip definitions of
+        package).
+        """
+        for i in range(1, len(name)):
+            for candidate in itertools.combinations_with_replacement(
+                cls.LEADING_CHARS, i - 1
+            ):
+                new_name = "~" + "".join(candidate) + name[i:]
+                if new_name != name:
+                    yield new_name
+
+        # If we make it this far, we will have to make a longer name
+        for i in range(len(cls.LEADING_CHARS)):
+            for candidate in itertools.combinations_with_replacement(
+                cls.LEADING_CHARS, i
+            ):
+                new_name = "~" + "".join(candidate) + name
+                if new_name != name:
+                    yield new_name
+
+    def _create(self, kind: str) -> str:
+        root, name = os.path.split(self.original)
+        for candidate in self._generate_names(name):
+            path = os.path.join(root, candidate)
+            try:
+                os.mkdir(path)
+            except OSError as ex:
+                # Continue if the name exists already
+                if ex.errno != errno.EEXIST:
+                    raise
+            else:
+                path = os.path.realpath(path)
+                break
+        else:
+            # Final fallback on the default behavior.
+            path = os.path.realpath(tempfile.mkdtemp(prefix=f"pip-{kind}-"))
+
+        logger.debug("Created temporary directory: %s", path)
+        return path
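A brief sketch of both behaviors, assuming the module above is importable:

import os

from pip._internal.utils.temp_dir import TempDirectory, tempdir_registry

# Context-manager use: the directory is removed on exit by default.
with TempDirectory(kind="demo") as tmp:
    path = tmp.path
    assert os.path.isdir(path)
assert not os.path.exists(path)

# A scoped registry can disable deletion for a given kind.
with tempdir_registry() as registry:
    registry.set_delete("demo", False)
    with TempDirectory(kind="demo") as kept:
        kept_path = kept.path
    assert os.path.isdir(kept_path)  # survived the context exit
kept.cleanup()  # manual cleanup once we're done with it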
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/unpacking.py b/venv/lib/python3.9/site-packages/pip/_internal/utils/unpacking.py
new file mode 100644
index 0000000..78b5c13
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/unpacking.py
@@ -0,0 +1,257 @@
+"""Utilities related archives.
+"""
+
+import logging
+import os
+import shutil
+import stat
+import tarfile
+import zipfile
+from typing import Iterable, List, Optional
+from zipfile import ZipInfo
+
+from pip._internal.exceptions import InstallationError
+from pip._internal.utils.filetypes import (
+    BZ2_EXTENSIONS,
+    TAR_EXTENSIONS,
+    XZ_EXTENSIONS,
+    ZIP_EXTENSIONS,
+)
+from pip._internal.utils.misc import ensure_dir
+
+logger = logging.getLogger(__name__)
+
+
+SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS
+
+try:
+    import bz2  # noqa
+
+    SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS
+except ImportError:
+    logger.debug("bz2 module is not available")
+
+try:
+    # Only for Python 3.3+
+    import lzma  # noqa
+
+    SUPPORTED_EXTENSIONS += XZ_EXTENSIONS
+except ImportError:
+    logger.debug("lzma module is not available")
+
+
+def current_umask() -> int:
+    """Get the current umask which involves having to set it temporarily."""
+    mask = os.umask(0)
+    os.umask(mask)
+    return mask
+
+
+def split_leading_dir(path: str) -> List[str]:
+    path = path.lstrip("/").lstrip("\\")
+    if "/" in path and (
+        ("\\" in path and path.find("/") < path.find("\\")) or "\\" not in path
+    ):
+        return path.split("/", 1)
+    elif "\\" in path:
+        return path.split("\\", 1)
+    else:
+        return [path, ""]
+
+
+def has_leading_dir(paths: Iterable[str]) -> bool:
+    """Returns true if all the paths have the same leading path name
+    (i.e., everything is in one subdirectory in an archive)"""
+    common_prefix = None
+    for path in paths:
+        prefix, rest = split_leading_dir(path)
+        if not prefix:
+            return False
+        elif common_prefix is None:
+            common_prefix = prefix
+        elif prefix != common_prefix:
+            return False
+    return True
+
+
+def is_within_directory(directory: str, target: str) -> bool:
+    """
+    Return true if the absolute path of target is within the directory
+    """
+    abs_directory = os.path.abspath(directory)
+    abs_target = os.path.abspath(target)
+
+    prefix = os.path.commonprefix([abs_directory, abs_target])
+    return prefix == abs_directory
+
+
+def set_extracted_file_to_default_mode_plus_executable(path: str) -> None:
+    """
+    Make the file present at path executable for user/group/world
+    ("chmod +x"). This is a no-op on Windows per the Python docs.
+    """
+    os.chmod(path, (0o777 & ~current_umask() | 0o111))
+
+
+def zip_item_is_executable(info: ZipInfo) -> bool:
+    mode = info.external_attr >> 16
+    # is mode set, is it a regular file, and does it have any execute
+    # permissions for user/group/world?
+    return bool(mode and stat.S_ISREG(mode) and mode & 0o111)
+
+
+def unzip_file(filename: str, location: str, flatten: bool = True) -> None:
+    """
+    Unzip the file (with path `filename`) to the destination `location`.  All
+    files are written based on system defaults and umask (i.e. permissions are
+    not preserved), except that regular file members with any execute
+    permissions (user, group, or world) have "chmod +x" applied after being
+    written. Note that for windows, any execute changes using os.chmod are
+    no-ops per the python docs.
+    """
+    ensure_dir(location)
+    zipfp = open(filename, "rb")
+    try:
+        zip = zipfile.ZipFile(zipfp, allowZip64=True)
+        leading = has_leading_dir(zip.namelist()) and flatten
+        for info in zip.infolist():
+            name = info.filename
+            fn = name
+            if leading:
+                fn = split_leading_dir(name)[1]
+            fn = os.path.join(location, fn)
+            dir = os.path.dirname(fn)
+            if not is_within_directory(location, fn):
+                message = (
+                    "The zip file ({}) has a file ({}) trying to install "
+                    "outside target directory ({})"
+                )
+                raise InstallationError(message.format(filename, fn, location))
+            if fn.endswith("/") or fn.endswith("\\"):
+                # A directory
+                ensure_dir(fn)
+            else:
+                ensure_dir(dir)
+                # Don't use read() to avoid allocating an arbitrarily large
+                # chunk of memory for the file's content
+                fp = zip.open(name)
+                try:
+                    with open(fn, "wb") as destfp:
+                        shutil.copyfileobj(fp, destfp)
+                finally:
+                    fp.close()
+                    if zip_item_is_executable(info):
+                        set_extracted_file_to_default_mode_plus_executable(fn)
+    finally:
+        zipfp.close()
+
+
+def untar_file(filename: str, location: str) -> None:
+    """
+    Untar the file (with path `filename`) to the destination `location`.
+    All files are written based on system defaults and umask (i.e. permissions
+    are not preserved), except that regular file members with any execute
+    permissions (user, group, or world) have "chmod +x" applied after being
+    written.  Note that for windows, any execute changes using os.chmod are
+    no-ops per the python docs.
+    """
+    ensure_dir(location)
+    if filename.lower().endswith(".gz") or filename.lower().endswith(".tgz"):
+        mode = "r:gz"
+    elif filename.lower().endswith(BZ2_EXTENSIONS):
+        mode = "r:bz2"
+    elif filename.lower().endswith(XZ_EXTENSIONS):
+        mode = "r:xz"
+    elif filename.lower().endswith(".tar"):
+        mode = "r"
+    else:
+        logger.warning(
+            "Cannot determine compression type for file %s",
+            filename,
+        )
+        mode = "r:*"
+    tar = tarfile.open(filename, mode, encoding="utf-8")
+    try:
+        leading = has_leading_dir([member.name for member in tar.getmembers()])
+        for member in tar.getmembers():
+            fn = member.name
+            if leading:
+                fn = split_leading_dir(fn)[1]
+            path = os.path.join(location, fn)
+            if not is_within_directory(location, path):
+                message = (
+                    "The tar file ({}) has a file ({}) trying to install "
+                    "outside target directory ({})"
+                )
+                raise InstallationError(message.format(filename, path, location))
+            if member.isdir():
+                ensure_dir(path)
+            elif member.issym():
+                try:
+                    tar._extract_member(member, path)
+                except Exception as exc:
+                    # Some corrupt tar files seem to produce this
+                    # (specifically bad symlinks)
+                    logger.warning(
+                        "In the tar file %s the member %s is invalid: %s",
+                        filename,
+                        member.name,
+                        exc,
+                    )
+                    continue
+            else:
+                try:
+                    fp = tar.extractfile(member)
+                except (KeyError, AttributeError) as exc:
+                    # Some corrupt tar files seem to produce this
+                    # (specifically bad symlinks)
+                    logger.warning(
+                        "In the tar file %s the member %s is invalid: %s",
+                        filename,
+                        member.name,
+                        exc,
+                    )
+                    continue
+                ensure_dir(os.path.dirname(path))
+                assert fp is not None
+                with open(path, "wb") as destfp:
+                    shutil.copyfileobj(fp, destfp)
+                fp.close()
+                # Update the timestamp (useful for cython compiled files)
+                tar.utime(member, path)
+                # does the member have any execute permissions for user/group/world?
+                if member.mode & 0o111:
+                    set_extracted_file_to_default_mode_plus_executable(path)
+    finally:
+        tar.close()
+
+
+def unpack_file(
+    filename: str,
+    location: str,
+    content_type: Optional[str] = None,
+) -> None:
+    filename = os.path.realpath(filename)
+    if (
+        content_type == "application/zip"
+        or filename.lower().endswith(ZIP_EXTENSIONS)
+        or zipfile.is_zipfile(filename)
+    ):
+        unzip_file(filename, location, flatten=not filename.endswith(".whl"))
+    elif (
+        content_type == "application/x-gzip"
+        or tarfile.is_tarfile(filename)
+        or filename.lower().endswith(TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS)
+    ):
+        untar_file(filename, location)
+    else:
+        # FIXME: handle?
+        # FIXME: magic signatures?
+        logger.critical(
+            "Cannot unpack file %s (downloaded from %s, content-type: %s); "
+            "cannot detect archive format",
+            filename,
+            location,
+            content_type,
+        )
+        raise InstallationError(f"Cannot determine archive format of {location}")
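A small end-to-end sketch, assuming the module above is importable; the archive is synthesized in a scratch directory:

import os
import tempfile
import zipfile

from pip._internal.utils.unpacking import is_within_directory, unpack_file

scratch = tempfile.mkdtemp()
archive = os.path.join(scratch, "demo.zip")
with zipfile.ZipFile(archive, "w") as zf:
    zf.writestr("demo/hello.txt", "hi\n")

dest = os.path.join(scratch, "out")
unpack_file(archive, dest)      # the single leading "demo/" dir is flattened
print(os.listdir(dest))         # ['hello.txt']

# The same traversal guard used by unzip_file and untar_file:
assert is_within_directory(dest, os.path.join(dest, "hello.txt"))
assert not is_within_directory(dest, os.path.join(dest, "..", "evil"))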
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/urls.py b/venv/lib/python3.9/site-packages/pip/_internal/utils/urls.py
new file mode 100644
index 0000000..6ba2e04
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/urls.py
@@ -0,0 +1,62 @@
+import os
+import string
+import urllib.parse
+import urllib.request
+from typing import Optional
+
+from .compat import WINDOWS
+
+
+def get_url_scheme(url: str) -> Optional[str]:
+    if ":" not in url:
+        return None
+    return url.split(":", 1)[0].lower()
+
+
+def path_to_url(path: str) -> str:
+    """
+    Convert a path to a file: URL.  The path will be made absolute and have
+    quoted path parts.
+    """
+    path = os.path.normpath(os.path.abspath(path))
+    url = urllib.parse.urljoin("file:", urllib.request.pathname2url(path))
+    return url
+
+
+def url_to_path(url: str) -> str:
+    """
+    Convert a file: URL to a path.
+    """
+    assert url.startswith(
+        "file:"
+    ), f"You can only turn file: urls into filenames (not {url!r})"
+
+    _, netloc, path, _, _ = urllib.parse.urlsplit(url)
+
+    if not netloc or netloc == "localhost":
+        # According to RFC 8089, same as empty authority.
+        netloc = ""
+    elif WINDOWS:
+        # If we have a UNC path, prepend UNC share notation.
+        netloc = "\\\\" + netloc
+    else:
+        raise ValueError(
+            f"non-local file URIs are not supported on this platform: {url!r}"
+        )
+
+    path = urllib.request.url2pathname(netloc + path)
+
+    # On Windows, urlsplit parses the path as something like "/C:/Users/foo".
+    # This creates issues for path-related functions like io.open(), so we try
+    # to detect and strip the leading slash.
+    if (
+        WINDOWS
+        and not netloc  # Not UNC.
+        and len(path) >= 3
+        and path[0] == "/"  # Leading slash to strip.
+        and path[1] in string.ascii_letters  # Drive letter.
+        and path[2:4] in (":", ":/")  # Colon + end of string, or colon + absolute path.
+    ):
+        path = path[1:]
+
+    return path
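A round-trip sketch on POSIX, assuming the module above is importable:

from pip._internal.utils.urls import get_url_scheme, path_to_url, url_to_path

url = path_to_url("/tmp/some dir/pkg")
print(url)                 # file:///tmp/some%20dir/pkg
print(url_to_path(url))    # /tmp/some dir/pkg

print(get_url_scheme("git+https://host/repo"))  # git+https
print(get_url_scheme("plain-string"))           # None (no colon, no scheme)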
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/virtualenv.py b/venv/lib/python3.9/site-packages/pip/_internal/utils/virtualenv.py
new file mode 100644
index 0000000..c926db4
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/virtualenv.py
@@ -0,0 +1,104 @@
+import logging
+import os
+import re
+import site
+import sys
+from typing import List, Optional
+
+logger = logging.getLogger(__name__)
+_INCLUDE_SYSTEM_SITE_PACKAGES_REGEX = re.compile(
+    r"include-system-site-packages\s*=\s*(?P<value>true|false)"
+)
+
+
+def _running_under_venv() -> bool:
+    """Checks if sys.base_prefix and sys.prefix match.
+
+    This handles PEP 405 compliant virtual environments.
+    """
+    return sys.prefix != getattr(sys, "base_prefix", sys.prefix)
+
+
+def _running_under_regular_virtualenv() -> bool:
+    """Checks if sys.real_prefix is set.
+
+    This handles virtual environments created with pypa's virtualenv.
+    """
+    # pypa/virtualenv case
+    return hasattr(sys, "real_prefix")
+
+
+def running_under_virtualenv() -> bool:
+    """Return True if we're running inside a virtualenv, False otherwise."""
+    return _running_under_venv() or _running_under_regular_virtualenv()
+
+
+def _get_pyvenv_cfg_lines() -> Optional[List[str]]:
+    """Reads {sys.prefix}/pyvenv.cfg and returns its contents as list of lines
+
+    Returns None, if it could not read/access the file.
+    """
+    pyvenv_cfg_file = os.path.join(sys.prefix, "pyvenv.cfg")
+    try:
+        # Although PEP 405 does not specify, the built-in venv module always
+        # writes with UTF-8. (pypa/pip#8717)
+        with open(pyvenv_cfg_file, encoding="utf-8") as f:
+            return f.read().splitlines()  # avoids trailing newlines
+    except OSError:
+        return None
+
+
+def _no_global_under_venv() -> bool:
+    """Check `{sys.prefix}/pyvenv.cfg` for system site-packages inclusion
+
+    PEP 405 specifies that when system site-packages are not supposed to be
+    visible from a virtual environment, `pyvenv.cfg` must contain the following
+    line:
+
+        include-system-site-packages = false
+
+    Additionally, log a warning if accessing the file fails.
+    """
+    cfg_lines = _get_pyvenv_cfg_lines()
+    if cfg_lines is None:
+        # We're not in a "sane" venv, so assume there is no system
+        # site-packages access (since that's PEP 405's default state).
+        logger.warning(
+            "Could not access 'pyvenv.cfg' despite a virtual environment "
+            "being active. Assuming global site-packages is not accessible "
+            "in this environment."
+        )
+        return True
+
+    for line in cfg_lines:
+        match = _INCLUDE_SYSTEM_SITE_PACKAGES_REGEX.match(line)
+        if match is not None and match.group("value") == "false":
+            return True
+    return False
+
+
+def _no_global_under_regular_virtualenv() -> bool:
+    """Check if "no-global-site-packages.txt" exists beside site.py
+
+    This mirrors logic in pypa/virtualenv for determining whether system
+    site-packages are visible in the virtual environment.
+    """
+    site_mod_dir = os.path.dirname(os.path.abspath(site.__file__))
+    no_global_site_packages_file = os.path.join(
+        site_mod_dir,
+        "no-global-site-packages.txt",
+    )
+    return os.path.exists(no_global_site_packages_file)
+
+
+def virtualenv_no_global() -> bool:
+    """Returns a boolean, whether running in venv with no system site-packages."""
+    # PEP 405 compliance needs to be checked first since virtualenv >=20 would
+    # return True for both checks, but is only able to use the PEP 405 config.
+    if _running_under_venv():
+        return _no_global_under_venv()
+
+    if _running_under_regular_virtualenv():
+        return _no_global_under_regular_virtualenv()
+
+    return False
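A sketch of the intended call pattern, assuming the module above is importable:

from pip._internal.utils.virtualenv import (
    running_under_virtualenv,
    virtualenv_no_global,
)

if running_under_virtualenv():
    # True for PEP 405 venvs with include-system-site-packages = false,
    # or legacy virtualenvs with the no-global-site-packages.txt marker.
    print("global site-packages visible:", not virtualenv_no_global())
else:
    print("not inside a virtual environment")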
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/utils/wheel.py b/venv/lib/python3.9/site-packages/pip/_internal/utils/wheel.py
new file mode 100644
index 0000000..e5e3f34
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/utils/wheel.py
@@ -0,0 +1,136 @@
+"""Support functions for working with wheel files.
+"""
+
+import logging
+from email.message import Message
+from email.parser import Parser
+from typing import Tuple
+from zipfile import BadZipFile, ZipFile
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.exceptions import UnsupportedWheel
+
+VERSION_COMPATIBLE = (1, 0)
+
+
+logger = logging.getLogger(__name__)
+
+
+def parse_wheel(wheel_zip: ZipFile, name: str) -> Tuple[str, Message]:
+    """Extract information from the provided wheel, ensuring it meets basic
+    standards.
+
+    Returns the name of the .dist-info directory and the parsed WHEEL metadata.
+    """
+    try:
+        info_dir = wheel_dist_info_dir(wheel_zip, name)
+        metadata = wheel_metadata(wheel_zip, info_dir)
+        version = wheel_version(metadata)
+    except UnsupportedWheel as e:
+        raise UnsupportedWheel("{} has an invalid wheel, {}".format(name, str(e)))
+
+    check_compatibility(version, name)
+
+    return info_dir, metadata
+
+
+def wheel_dist_info_dir(source: ZipFile, name: str) -> str:
+    """Returns the name of the contained .dist-info directory.
+
+    Raises AssertionError or UnsupportedWheel if not found, >1 found, or
+    it doesn't match the provided name.
+    """
+    # Zip file path separators must be /
+    subdirs = {p.split("/", 1)[0] for p in source.namelist()}
+
+    info_dirs = [s for s in subdirs if s.endswith(".dist-info")]
+
+    if not info_dirs:
+        raise UnsupportedWheel(".dist-info directory not found")
+
+    if len(info_dirs) > 1:
+        raise UnsupportedWheel(
+            "multiple .dist-info directories found: {}".format(", ".join(info_dirs))
+        )
+
+    info_dir = info_dirs[0]
+
+    info_dir_name = canonicalize_name(info_dir)
+    canonical_name = canonicalize_name(name)
+    if not info_dir_name.startswith(canonical_name):
+        raise UnsupportedWheel(
+            ".dist-info directory {!r} does not start with {!r}".format(
+                info_dir, canonical_name
+            )
+        )
+
+    return info_dir
+
+
+def read_wheel_metadata_file(source: ZipFile, path: str) -> bytes:
+    try:
+        return source.read(path)
+        # BadZipFile for general corruption, KeyError for missing entry,
+        # and RuntimeError for password-protected files
+    except (BadZipFile, KeyError, RuntimeError) as e:
+        raise UnsupportedWheel(f"could not read {path!r} file: {e!r}")
+
+
+def wheel_metadata(source: ZipFile, dist_info_dir: str) -> Message:
+    """Return the WHEEL metadata of an extracted wheel, if possible.
+    Otherwise, raise UnsupportedWheel.
+    """
+    path = f"{dist_info_dir}/WHEEL"
+    # Zip file path separators must be /
+    wheel_contents = read_wheel_metadata_file(source, path)
+
+    try:
+        wheel_text = wheel_contents.decode()
+    except UnicodeDecodeError as e:
+        raise UnsupportedWheel(f"error decoding {path!r}: {e!r}")
+
+    # FeedParser (used by Parser) does not raise any exceptions. The returned
+    # message may have .defects populated, but for backwards-compatibility we
+    # currently ignore them.
+    return Parser().parsestr(wheel_text)
+
+
+def wheel_version(wheel_data: Message) -> Tuple[int, ...]:
+    """Given WHEEL metadata, return the parsed Wheel-Version.
+    Otherwise, raise UnsupportedWheel.
+    """
+    version_text = wheel_data["Wheel-Version"]
+    if version_text is None:
+        raise UnsupportedWheel("WHEEL is missing Wheel-Version")
+
+    version = version_text.strip()
+
+    try:
+        return tuple(map(int, version.split(".")))
+    except ValueError:
+        raise UnsupportedWheel(f"invalid Wheel-Version: {version!r}")
+
+
+def check_compatibility(version: Tuple[int, ...], name: str) -> None:
+    """Raises errors or warns if called with an incompatible Wheel-Version.
+
+    pip should refuse to install a Wheel-Version that's a major series
+    ahead of what it's compatible with (e.g. 2.0 > 1.1); and warn when
+    installing a version only a minor version ahead (e.g. 1.2 > 1.1).
+
+    version: a 2-tuple representing a Wheel-Version (Major, Minor)
+    name: name of wheel or package to raise exception about
+
+    :raises UnsupportedWheel: when an incompatible Wheel-Version is given
+    """
+    if version[0] > VERSION_COMPATIBLE[0]:
+        raise UnsupportedWheel(
+            "{}'s Wheel-Version ({}) is not compatible with this version "
+            "of pip".format(name, ".".join(map(str, version)))
+        )
+    elif version > VERSION_COMPATIBLE:
+        logger.warning(
+            "Installing from a newer Wheel-Version (%s)",
+            ".".join(map(str, version)),
+        )
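A self-contained sketch, assuming the module above is importable; the wheel is synthesized in memory rather than read from disk:

import io
from zipfile import ZipFile

from pip._internal.utils.wheel import parse_wheel, wheel_version

buf = io.BytesIO()
with ZipFile(buf, "w") as zf:
    zf.writestr(
        "demo-1.0.dist-info/WHEEL",
        "Wheel-Version: 1.0\nRoot-Is-Purelib: true\n",
    )

with ZipFile(buf) as zf:
    info_dir, metadata = parse_wheel(zf, "demo")

print(info_dir)                 # demo-1.0.dist-info
print(wheel_version(metadata))  # (1, 0)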
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/vcs/__init__.py b/venv/lib/python3.9/site-packages/pip/_internal/vcs/__init__.py
new file mode 100644
index 0000000..b6beddb
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/vcs/__init__.py
@@ -0,0 +1,15 @@
+# Expose a limited set of classes and functions so callers outside of
+# the vcs package don't need to import deeper than `pip._internal.vcs`.
+# (The test directory may still need to import from a vcs sub-package.)
+# Import all vcs modules to register each VCS in the VcsSupport object.
+import pip._internal.vcs.bazaar
+import pip._internal.vcs.git
+import pip._internal.vcs.mercurial
+import pip._internal.vcs.subversion  # noqa: F401
+from pip._internal.vcs.versioncontrol import (  # noqa: F401
+    RemoteNotFoundError,
+    RemoteNotValidError,
+    is_url,
+    make_vcs_requirement_url,
+    vcs,
+)
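A quick sketch of the re-exported helpers, assuming the package above is importable:

from pip._internal.vcs import is_url, make_vcs_requirement_url

print(is_url("git+https://github.com/pypa/pip.git"))  # True
print(is_url("./local/path"))                         # False

print(make_vcs_requirement_url(
    "git+https://github.com/pypa/pip.git", "abc123", "pip"
))  # git+https://github.com/pypa/pip.git@abc123#egg=pip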
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..80bd479
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-39.pyc
new file mode 100644
index 0000000..80e2b7e
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/git.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/git.cpython-39.pyc
new file mode 100644
index 0000000..51e3c79
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/git.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-39.pyc
new file mode 100644
index 0000000..305bb30
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-39.pyc
new file mode 100644
index 0000000..4452ce0
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-39.pyc
new file mode 100644
index 0000000..438795d
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/vcs/bazaar.py b/venv/lib/python3.9/site-packages/pip/_internal/vcs/bazaar.py
new file mode 100644
index 0000000..06c80e4
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/vcs/bazaar.py
@@ -0,0 +1,112 @@
+import logging
+from typing import List, Optional, Tuple
+
+from pip._internal.utils.misc import HiddenText, display_path
+from pip._internal.utils.subprocess import make_command
+from pip._internal.utils.urls import path_to_url
+from pip._internal.vcs.versioncontrol import (
+    AuthInfo,
+    RemoteNotFoundError,
+    RevOptions,
+    VersionControl,
+    vcs,
+)
+
+logger = logging.getLogger(__name__)
+
+
+class Bazaar(VersionControl):
+    name = "bzr"
+    dirname = ".bzr"
+    repo_name = "branch"
+    schemes = (
+        "bzr+http",
+        "bzr+https",
+        "bzr+ssh",
+        "bzr+sftp",
+        "bzr+ftp",
+        "bzr+lp",
+        "bzr+file",
+    )
+
+    @staticmethod
+    def get_base_rev_args(rev: str) -> List[str]:
+        return ["-r", rev]
+
+    def fetch_new(
+        self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int
+    ) -> None:
+        rev_display = rev_options.to_display()
+        logger.info(
+            "Checking out %s%s to %s",
+            url,
+            rev_display,
+            display_path(dest),
+        )
+        if verbosity <= 0:
+            flag = "--quiet"
+        elif verbosity == 1:
+            flag = ""
+        else:
+            flag = f"-{'v'*verbosity}"
+        cmd_args = make_command(
+            "checkout", "--lightweight", flag, rev_options.to_args(), url, dest
+        )
+        self.run_command(cmd_args)
+
+    def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
+        self.run_command(make_command("switch", url), cwd=dest)
+
+    def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
+        output = self.run_command(
+            make_command("info"), show_stdout=False, stdout_only=True, cwd=dest
+        )
+        if output.startswith("Standalone "):
+            # Older versions of pip used to create standalone branches.
+            # Convert the standalone branch to a checkout by calling "bzr bind".
+            cmd_args = make_command("bind", "-q", url)
+            self.run_command(cmd_args, cwd=dest)
+
+        cmd_args = make_command("update", "-q", rev_options.to_args())
+        self.run_command(cmd_args, cwd=dest)
+
+    @classmethod
+    def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]:
+        # hotfix the URL scheme: re-add the bzr+ prefix removed from bzr+ssh://
+        url, rev, user_pass = super().get_url_rev_and_auth(url)
+        if url.startswith("ssh://"):
+            url = "bzr+" + url
+        return url, rev, user_pass
+
+    @classmethod
+    def get_remote_url(cls, location: str) -> str:
+        urls = cls.run_command(
+            ["info"], show_stdout=False, stdout_only=True, cwd=location
+        )
+        for line in urls.splitlines():
+            line = line.strip()
+            for x in ("checkout of branch: ", "parent branch: "):
+                if line.startswith(x):
+                    repo = line.split(x)[1]
+                    if cls._is_local_repository(repo):
+                        return path_to_url(repo)
+                    return repo
+        raise RemoteNotFoundError
+
+    @classmethod
+    def get_revision(cls, location: str) -> str:
+        revision = cls.run_command(
+            ["revno"],
+            show_stdout=False,
+            stdout_only=True,
+            cwd=location,
+        )
+        return revision.splitlines()[-1]
+
+    @classmethod
+    def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool:
+        """Always assume the versions don't match"""
+        return False
+
+
+vcs.register(Bazaar)
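Two of the pure-Python hooks can be sketched without a bzr binary installed, assuming the module above is importable:

from pip._internal.vcs.bazaar import Bazaar

print(Bazaar.get_base_rev_args("42"))  # ['-r', '42']

# The "bzr+" prefix stripped during generic URL parsing is restored for ssh:
url, rev, _auth = Bazaar.get_url_rev_and_auth("bzr+ssh://host/branch@12")
print(url, rev)  # bzr+ssh://host/branch 12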
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/vcs/git.py b/venv/lib/python3.9/site-packages/pip/_internal/vcs/git.py
new file mode 100644
index 0000000..8d1d499
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/vcs/git.py
@@ -0,0 +1,526 @@
+import logging
+import os.path
+import pathlib
+import re
+import urllib.parse
+import urllib.request
+from typing import List, Optional, Tuple
+
+from pip._internal.exceptions import BadCommand, InstallationError
+from pip._internal.utils.misc import HiddenText, display_path, hide_url
+from pip._internal.utils.subprocess import make_command
+from pip._internal.vcs.versioncontrol import (
+    AuthInfo,
+    RemoteNotFoundError,
+    RemoteNotValidError,
+    RevOptions,
+    VersionControl,
+    find_path_to_project_root_from_repo_root,
+    vcs,
+)
+
+urlsplit = urllib.parse.urlsplit
+urlunsplit = urllib.parse.urlunsplit
+
+
+logger = logging.getLogger(__name__)
+
+
+GIT_VERSION_REGEX = re.compile(
+    r"^git version "  # Prefix.
+    r"(\d+)"  # Major.
+    r"\.(\d+)"  # Dot, minor.
+    r"(?:\.(\d+))?"  # Optional dot, patch.
+    r".*$"  # Suffix, including any pre- and post-release segments we don't care about.
+)
+
+HASH_REGEX = re.compile("^[a-fA-F0-9]{40}$")
+
+# SCP (Secure copy protocol) shorthand. e.g. 'git@example.com:foo/bar.git'
+SCP_REGEX = re.compile(
+    r"""^
+    # Optional user, e.g. 'git@'
+    (\w+@)?
+    # Server, e.g. 'github.com'.
+    ([^/:]+):
+    # The server-side path. e.g. 'user/project.git'. Must start with an
+    # alphanumeric character so as not to be confusable with Windows paths
+    # like 'C:/foo/bar' or 'C:\foo\bar'.
+    (\w[^:]*)
+    $""",
+    re.VERBOSE,
+)
+
+
+def looks_like_hash(sha: str) -> bool:
+    return bool(HASH_REGEX.match(sha))
+
+
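For illustration, how the three patterns above behave (a sketch, not part of the vendored file; the names are importable from this module):

from pip._internal.vcs.git import GIT_VERSION_REGEX, SCP_REGEX, looks_like_hash

m = GIT_VERSION_REGEX.match("git version 2.39.2")
assert m is not None
print(tuple(int(c) for c in m.groups()))  # (2, 39, 2)

print(looks_like_hash("a" * 40))  # True: full 40-char hex digest
print(looks_like_hash("abc123"))  # False: abbreviated hashes don't count

print(bool(SCP_REGEX.match("git@example.com:foo/bar.git")))  # True
print(bool(SCP_REGEX.match("C:/foo/bar")))                   # False: drive letter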
+class Git(VersionControl):
+    name = "git"
+    dirname = ".git"
+    repo_name = "clone"
+    schemes = (
+        "git+http",
+        "git+https",
+        "git+ssh",
+        "git+git",
+        "git+file",
+    )
+    # Prevent the user's environment variables from interfering with pip:
+    # https://github.com/pypa/pip/issues/1130
+    unset_environ = ("GIT_DIR", "GIT_WORK_TREE")
+    default_arg_rev = "HEAD"
+
+    @staticmethod
+    def get_base_rev_args(rev: str) -> List[str]:
+        return [rev]
+
+    def is_immutable_rev_checkout(self, url: str, dest: str) -> bool:
+        _, rev_options = self.get_url_rev_options(hide_url(url))
+        if not rev_options.rev:
+            return False
+        if not self.is_commit_id_equal(dest, rev_options.rev):
+            # the current commit is different from rev,
+            # which means rev was something else than a commit hash
+            return False
+        # return False in the rare case rev is both a commit hash
+        # and a tag or a branch; we don't want to cache in that case
+        # because that branch/tag could point to something else in the future
+        is_tag_or_branch = bool(self.get_revision_sha(dest, rev_options.rev)[0])
+        return not is_tag_or_branch
+
+    def get_git_version(self) -> Tuple[int, ...]:
+        version = self.run_command(
+            ["version"],
+            command_desc="git version",
+            show_stdout=False,
+            stdout_only=True,
+        )
+        match = GIT_VERSION_REGEX.match(version)
+        if not match:
+            logger.warning("Can't parse git version: %s", version)
+            return ()
+        return tuple(int(c) for c in match.groups())
+
+    @classmethod
+    def get_current_branch(cls, location: str) -> Optional[str]:
+        """
+        Return the current branch, or None if HEAD isn't at a branch
+        (e.g. detached HEAD).
+        """
+        # git-symbolic-ref exits with empty stdout if "HEAD" is a detached
+        # HEAD rather than a symbolic ref.  In addition, the -q causes the
+        # command to exit with status code 1 instead of 128 in this case
+        # and to suppress the message to stderr.
+        args = ["symbolic-ref", "-q", "HEAD"]
+        output = cls.run_command(
+            args,
+            extra_ok_returncodes=(1,),
+            show_stdout=False,
+            stdout_only=True,
+            cwd=location,
+        )
+        ref = output.strip()
+
+        if ref.startswith("refs/heads/"):
+            return ref[len("refs/heads/") :]
+
+        return None
+
+    @classmethod
+    def get_revision_sha(cls, dest: str, rev: str) -> Tuple[Optional[str], bool]:
+        """
+        Return (sha_or_none, is_branch), where sha_or_none is a commit hash
+        if the revision names a remote branch or tag, otherwise None.
+
+        Args:
+          dest: the repository directory.
+          rev: the revision name.
+        """
+        # Pass rev to pre-filter the list.
+        output = cls.run_command(
+            ["show-ref", rev],
+            cwd=dest,
+            show_stdout=False,
+            stdout_only=True,
+            on_returncode="ignore",
+        )
+        refs = {}
+        # NOTE: We do not use splitlines here since that would split on other
+        #       unicode separators, which can be maliciously used to install a
+        #       different revision.
+        for line in output.strip().split("\n"):
+            line = line.rstrip("\r")
+            if not line:
+                continue
+            try:
+                ref_sha, ref_name = line.split(" ", maxsplit=2)
+            except ValueError:
+                # Include the offending line to simplify troubleshooting if
+                # this error ever occurs.
+                raise ValueError(f"unexpected show-ref line: {line!r}")
+
+            refs[ref_name] = ref_sha
+
+        branch_ref = f"refs/remotes/origin/{rev}"
+        tag_ref = f"refs/tags/{rev}"
+
+        sha = refs.get(branch_ref)
+        if sha is not None:
+            return (sha, True)
+
+        sha = refs.get(tag_ref)
+
+        return (sha, False)
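+
+    # Illustrative sketch (editor's note, not part of upstream pip): given
+    # `git show-ref main` output such as
+    #
+    #     abc1234... refs/remotes/origin/main
+    #
+    # get_revision_sha(dest, "main") returns ("abc1234...", True), while a
+    # tag ref like refs/tags/v1.0 yields (sha, False).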
+
+    @classmethod
+    def _should_fetch(cls, dest: str, rev: str) -> bool:
+        """
+        Return true if rev is a ref or is a commit that we don't have locally.
+
+        Branches and tags are not considered in this method because they are
+        assumed to be always available locally (which is a normal outcome of
+        ``git clone`` and ``git fetch --tags``).
+        """
+        if rev.startswith("refs/"):
+            # Always fetch remote refs.
+            return True
+
+        if not looks_like_hash(rev):
+            # Git fetch would fail with abbreviated commits.
+            return False
+
+        if cls.has_commit(dest, rev):
+            # Don't fetch if we have the commit locally.
+            return False
+
+        return True
+
+    @classmethod
+    def resolve_revision(
+        cls, dest: str, url: HiddenText, rev_options: RevOptions
+    ) -> RevOptions:
+        """
+        Resolve a revision to a new RevOptions object with the SHA1 of the
+        branch, tag, or ref if found.
+
+        Args:
+          rev_options: a RevOptions object.
+        """
+        rev = rev_options.arg_rev
+        # The arg_rev property's implementation for Git ensures that the
+        # rev return value is always non-None.
+        assert rev is not None
+
+        sha, is_branch = cls.get_revision_sha(dest, rev)
+
+        if sha is not None:
+            rev_options = rev_options.make_new(sha)
+            rev_options.branch_name = rev if is_branch else None
+
+            return rev_options
+
+        # Do not show a warning for the common case of something that has
+        # the form of a Git commit hash.
+        if not looks_like_hash(rev):
+            logger.warning(
+                "Did not find branch or tag '%s', assuming revision or ref.",
+                rev,
+            )
+
+        if not cls._should_fetch(dest, rev):
+            return rev_options
+
+        # fetch the requested revision
+        cls.run_command(
+            make_command("fetch", "-q", url, rev_options.to_args()),
+            cwd=dest,
+        )
+        # Change the revision to the SHA of the ref we fetched
+        sha = cls.get_revision(dest, rev="FETCH_HEAD")
+        rev_options = rev_options.make_new(sha)
+
+        return rev_options
+
+    @classmethod
+    def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool:
+        """
+        Return whether the current commit hash equals the given name.
+
+        Args:
+          dest: the repository directory.
+          name: a string name.
+        """
+        if not name:
+            # Then avoid an unnecessary subprocess call.
+            return False
+
+        return cls.get_revision(dest) == name
+
+    def fetch_new(
+        self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int
+    ) -> None:
+        rev_display = rev_options.to_display()
+        logger.info("Cloning %s%s to %s", url, rev_display, display_path(dest))
+        if verbosity <= 0:
+            flags: Tuple[str, ...] = ("--quiet",)
+        elif verbosity == 1:
+            flags = ()
+        else:
+            flags = ("--verbose", "--progress")
+        if self.get_git_version() >= (2, 17):
+            # Git added support for partial clone in 2.17
+            # https://git-scm.com/docs/partial-clone
+            # Speeds up cloning by functioning without a complete copy of the repository.
+            self.run_command(
+                make_command(
+                    "clone",
+                    "--filter=blob:none",
+                    *flags,
+                    url,
+                    dest,
+                )
+            )
+        else:
+            self.run_command(make_command("clone", *flags, url, dest))
+
+        if rev_options.rev:
+            # Then a specific revision was requested.
+            rev_options = self.resolve_revision(dest, url, rev_options)
+            branch_name = getattr(rev_options, "branch_name", None)
+            logger.debug("Rev options %s, branch_name %s", rev_options, branch_name)
+            if branch_name is None:
+                # Only do a checkout if the current commit id doesn't match
+                # the requested revision.
+                if not self.is_commit_id_equal(dest, rev_options.rev):
+                    cmd_args = make_command(
+                        "checkout",
+                        "-q",
+                        rev_options.to_args(),
+                    )
+                    self.run_command(cmd_args, cwd=dest)
+            elif self.get_current_branch(dest) != branch_name:
+                # Then a specific branch was requested, and that branch
+                # is not yet checked out.
+                track_branch = f"origin/{branch_name}"
+                cmd_args = [
+                    "checkout",
+                    "-b",
+                    branch_name,
+                    "--track",
+                    track_branch,
+                ]
+                self.run_command(cmd_args, cwd=dest)
+        else:
+            sha = self.get_revision(dest)
+            rev_options = rev_options.make_new(sha)
+
+        logger.info("Resolved %s to commit %s", url, rev_options.rev)
+
+        # The repo may contain submodules.
+        self.update_submodules(dest)
+
+    def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
+        self.run_command(
+            make_command("config", "remote.origin.url", url),
+            cwd=dest,
+        )
+        cmd_args = make_command("checkout", "-q", rev_options.to_args())
+        self.run_command(cmd_args, cwd=dest)
+
+        self.update_submodules(dest)
+
+    def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
+        # First fetch changes from the default remote
+        if self.get_git_version() >= (1, 9):
+            # fetch tags in addition to everything else
+            self.run_command(["fetch", "-q", "--tags"], cwd=dest)
+        else:
+            self.run_command(["fetch", "-q"], cwd=dest)
+        # Then reset to wanted revision (maybe even origin/master)
+        rev_options = self.resolve_revision(dest, url, rev_options)
+        cmd_args = make_command("reset", "--hard", "-q", rev_options.to_args())
+        self.run_command(cmd_args, cwd=dest)
+        # Update submodules, if any.
+        self.update_submodules(dest)
+
+    @classmethod
+    def get_remote_url(cls, location: str) -> str:
+        """
+        Return URL of the first remote encountered.
+
+        Raises RemoteNotFoundError if the repository does not have a remote
+        url configured.
+        """
+        # We need to pass 1 for extra_ok_returncodes since the command
+        # exits with return code 1 if there are no matching lines.
+        stdout = cls.run_command(
+            ["config", "--get-regexp", r"remote\..*\.url"],
+            extra_ok_returncodes=(1,),
+            show_stdout=False,
+            stdout_only=True,
+            cwd=location,
+        )
+        remotes = stdout.splitlines()
+        try:
+            found_remote = remotes[0]
+        except IndexError:
+            raise RemoteNotFoundError
+
+        for remote in remotes:
+            if remote.startswith("remote.origin.url "):
+                found_remote = remote
+                break
+        url = found_remote.split(" ")[1]
+        return cls._git_remote_to_pip_url(url.strip())
+
+    @staticmethod
+    def _git_remote_to_pip_url(url: str) -> str:
+        """
+        Convert a remote url from what git uses to what pip accepts.
+
+        There are 3 legal forms **url** may take:
+
+            1. A fully qualified url: ssh://git@example.com/foo/bar.git
+            2. A local project.git folder: /path/to/bare/repository.git
+            3. SCP shorthand for form 1: git@example.com:foo/bar.git
+
+        Form 1 is output as-is. Form 2 must be converted to URI and form 3 must
+        be converted to form 1.
+
+        See the corresponding test test_git_remote_url_to_pip() for examples of
+        sample inputs/outputs.
+        """
+        if re.match(r"\w+://", url):
+            # This is already valid. Pass it through as-is.
+            return url
+        if os.path.exists(url):
+            # A local bare remote (git clone --mirror).
+            # Needs a file:// prefix.
+            return pathlib.PurePath(url).as_uri()
+        scp_match = SCP_REGEX.match(url)
+        if scp_match:
+            # Add an ssh:// prefix and replace the ':' with a '/'.
+            return scp_match.expand(r"ssh://\1\2/\3")
+        # Otherwise, bail out.
+        raise RemoteNotValidError(url)
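+
+    # Illustrative sketch (editor's note, not part of upstream pip):
+    #
+    #     >>> Git._git_remote_to_pip_url("git@example.com:foo/bar.git")
+    #     'ssh://git@example.com/foo/bar.git'
+    #     >>> Git._git_remote_to_pip_url("https://example.com/foo/bar.git")
+    #     'https://example.com/foo/bar.git'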
+
+    @classmethod
+    def has_commit(cls, location: str, rev: str) -> bool:
+        """
+        Check if rev is a commit that is available in the local repository.
+        """
+        try:
+            cls.run_command(
+                ["rev-parse", "-q", "--verify", "sha^" + rev],
+                cwd=location,
+                log_failed_cmd=False,
+            )
+        except InstallationError:
+            return False
+        else:
+            return True
+
+    @classmethod
+    def get_revision(cls, location: str, rev: Optional[str] = None) -> str:
+        if rev is None:
+            rev = "HEAD"
+        current_rev = cls.run_command(
+            ["rev-parse", rev],
+            show_stdout=False,
+            stdout_only=True,
+            cwd=location,
+        )
+        return current_rev.strip()
+
+    @classmethod
+    def get_subdirectory(cls, location: str) -> Optional[str]:
+        """
+        Return the path to Python project root, relative to the repo root.
+        Return None if the project root is in the repo root.
+        """
+        # find the repo root
+        git_dir = cls.run_command(
+            ["rev-parse", "--git-dir"],
+            show_stdout=False,
+            stdout_only=True,
+            cwd=location,
+        ).strip()
+        if not os.path.isabs(git_dir):
+            git_dir = os.path.join(location, git_dir)
+        repo_root = os.path.abspath(os.path.join(git_dir, ".."))
+        return find_path_to_project_root_from_repo_root(location, repo_root)
+
+    @classmethod
+    def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]:
+        """
+        Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
+        That's required because although they use SSH they sometimes don't
+        work with an ssh:// scheme (e.g. GitHub). But we need a scheme for
+        parsing. Hence we remove it again afterwards and return it as a stub.
+        """
+        # Works around an apparent Git bug
+        # (see https://article.gmane.org/gmane.comp.version-control.git/146500)
+        scheme, netloc, path, query, fragment = urlsplit(url)
+        if scheme.endswith("file"):
+            initial_slashes = path[: -len(path.lstrip("/"))]
+            newpath = initial_slashes + urllib.request.url2pathname(path).replace(
+                "\\", "/"
+            ).lstrip("/")
+            after_plus = scheme.find("+") + 1
+            url = scheme[:after_plus] + urlunsplit(
+                (scheme[after_plus:], netloc, newpath, query, fragment),
+            )
+
+        if "://" not in url:
+            assert "file:" not in url
+            url = url.replace("git+", "git+ssh://")
+            url, rev, user_pass = super().get_url_rev_and_auth(url)
+            url = url.replace("ssh://", "")
+        else:
+            url, rev, user_pass = super().get_url_rev_and_auth(url)
+
+        return url, rev, user_pass
+
+    @classmethod
+    def update_submodules(cls, location: str) -> None:
+        if not os.path.exists(os.path.join(location, ".gitmodules")):
+            return
+        cls.run_command(
+            ["submodule", "update", "--init", "--recursive", "-q"],
+            cwd=location,
+        )
+
+    @classmethod
+    def get_repository_root(cls, location: str) -> Optional[str]:
+        loc = super().get_repository_root(location)
+        if loc:
+            return loc
+        try:
+            r = cls.run_command(
+                ["rev-parse", "--show-toplevel"],
+                cwd=location,
+                show_stdout=False,
+                stdout_only=True,
+                on_returncode="raise",
+                log_failed_cmd=False,
+            )
+        except BadCommand:
+            logger.debug(
+                "could not determine if %s is under git control "
+                "because git is not available",
+                location,
+            )
+            return None
+        except InstallationError:
+            return None
+        return os.path.normpath(r.rstrip("\r\n"))
+
+    @staticmethod
+    def should_add_vcs_url_prefix(repo_url: str) -> bool:
+        """In either https or ssh form, requirements must be prefixed with git+."""
+        return True
+
+
+vcs.register(Git)
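+
+# Illustrative usage sketch (editor's note, not part of upstream pip): once
+# registered, the backend is looked up through the shared `vcs` registry,
+# e.g.
+#
+#     >>> from pip._internal.vcs import vcs
+#     >>> backend = vcs.get_backend("git")
+#     >>> backend.get_remote_url("/path/to/checkout")  # hypothetical path
+#     'https://github.com/user/project.git'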
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/vcs/mercurial.py b/venv/lib/python3.9/site-packages/pip/_internal/vcs/mercurial.py
new file mode 100644
index 0000000..2a005e0
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/vcs/mercurial.py
@@ -0,0 +1,163 @@
+import configparser
+import logging
+import os
+from typing import List, Optional, Tuple
+
+from pip._internal.exceptions import BadCommand, InstallationError
+from pip._internal.utils.misc import HiddenText, display_path
+from pip._internal.utils.subprocess import make_command
+from pip._internal.utils.urls import path_to_url
+from pip._internal.vcs.versioncontrol import (
+    RevOptions,
+    VersionControl,
+    find_path_to_project_root_from_repo_root,
+    vcs,
+)
+
+logger = logging.getLogger(__name__)
+
+
+class Mercurial(VersionControl):
+    name = "hg"
+    dirname = ".hg"
+    repo_name = "clone"
+    schemes = (
+        "hg+file",
+        "hg+http",
+        "hg+https",
+        "hg+ssh",
+        "hg+static-http",
+    )
+
+    @staticmethod
+    def get_base_rev_args(rev: str) -> List[str]:
+        return [rev]
+
+    def fetch_new(
+        self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int
+    ) -> None:
+        rev_display = rev_options.to_display()
+        logger.info(
+            "Cloning hg %s%s to %s",
+            url,
+            rev_display,
+            display_path(dest),
+        )
+        if verbosity <= 0:
+            flags: Tuple[str, ...] = ("--quiet",)
+        elif verbosity == 1:
+            flags = ()
+        elif verbosity == 2:
+            flags = ("--verbose",)
+        else:
+            flags = ("--verbose", "--debug")
+        self.run_command(make_command("clone", "--noupdate", *flags, url, dest))
+        self.run_command(
+            make_command("update", *flags, rev_options.to_args()),
+            cwd=dest,
+        )
+
+    def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
+        repo_config = os.path.join(dest, self.dirname, "hgrc")
+        config = configparser.RawConfigParser()
+        try:
+            config.read(repo_config)
+            config.set("paths", "default", url.secret)
+            with open(repo_config, "w") as config_file:
+                config.write(config_file)
+        except (OSError, configparser.NoSectionError) as exc:
+            logger.warning("Could not switch Mercurial repository to %s: %s", url, exc)
+        else:
+            cmd_args = make_command("update", "-q", rev_options.to_args())
+            self.run_command(cmd_args, cwd=dest)
+
+    def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
+        self.run_command(["pull", "-q"], cwd=dest)
+        cmd_args = make_command("update", "-q", rev_options.to_args())
+        self.run_command(cmd_args, cwd=dest)
+
+    @classmethod
+    def get_remote_url(cls, location: str) -> str:
+        url = cls.run_command(
+            ["showconfig", "paths.default"],
+            show_stdout=False,
+            stdout_only=True,
+            cwd=location,
+        ).strip()
+        if cls._is_local_repository(url):
+            url = path_to_url(url)
+        return url.strip()
+
+    @classmethod
+    def get_revision(cls, location: str) -> str:
+        """
+        Return the repository-local changeset revision number, as an integer.
+        """
+        current_revision = cls.run_command(
+            ["parents", "--template={rev}"],
+            show_stdout=False,
+            stdout_only=True,
+            cwd=location,
+        ).strip()
+        return current_revision
+
+    @classmethod
+    def get_requirement_revision(cls, location: str) -> str:
+        """
+        Return the changeset identification hash, as a 40-character
+        hexadecimal string
+        """
+        current_rev_hash = cls.run_command(
+            ["parents", "--template={node}"],
+            show_stdout=False,
+            stdout_only=True,
+            cwd=location,
+        ).strip()
+        return current_rev_hash
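+
+    # Editor's note (not part of upstream pip): the two methods above differ
+    # only in the template passed to `hg parents`: "{rev}" yields a
+    # repo-local integer such as "42", while "{node}" yields the
+    # 40-character changeset hash used in frozen requirements.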
+
+    @classmethod
+    def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool:
+        """Always assume the versions don't match"""
+        return False
+
+    @classmethod
+    def get_subdirectory(cls, location: str) -> Optional[str]:
+        """
+        Return the path to Python project root, relative to the repo root.
+        Return None if the project root is in the repo root.
+        """
+        # find the repo root
+        repo_root = cls.run_command(
+            ["root"], show_stdout=False, stdout_only=True, cwd=location
+        ).strip()
+        if not os.path.isabs(repo_root):
+            repo_root = os.path.abspath(os.path.join(location, repo_root))
+        return find_path_to_project_root_from_repo_root(location, repo_root)
+
+    @classmethod
+    def get_repository_root(cls, location: str) -> Optional[str]:
+        loc = super().get_repository_root(location)
+        if loc:
+            return loc
+        try:
+            r = cls.run_command(
+                ["root"],
+                cwd=location,
+                show_stdout=False,
+                stdout_only=True,
+                on_returncode="raise",
+                log_failed_cmd=False,
+            )
+        except BadCommand:
+            logger.debug(
+                "could not determine if %s is under hg control "
+                "because hg is not available",
+                location,
+            )
+            return None
+        except InstallationError:
+            return None
+        return os.path.normpath(r.rstrip("\r\n"))
+
+
+vcs.register(Mercurial)
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/vcs/subversion.py b/venv/lib/python3.9/site-packages/pip/_internal/vcs/subversion.py
new file mode 100644
index 0000000..2cd6f0a
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/vcs/subversion.py
@@ -0,0 +1,324 @@
+import logging
+import os
+import re
+from typing import List, Optional, Tuple
+
+from pip._internal.utils.misc import (
+    HiddenText,
+    display_path,
+    is_console_interactive,
+    is_installable_dir,
+    split_auth_from_netloc,
+)
+from pip._internal.utils.subprocess import CommandArgs, make_command
+from pip._internal.vcs.versioncontrol import (
+    AuthInfo,
+    RemoteNotFoundError,
+    RevOptions,
+    VersionControl,
+    vcs,
+)
+
+logger = logging.getLogger(__name__)
+
+_svn_xml_url_re = re.compile('url="([^"]+)"')
+_svn_rev_re = re.compile(r'committed-rev="(\d+)"')
+_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
+_svn_info_xml_url_re = re.compile(r"<url>(.*)</url>")
+
+
+class Subversion(VersionControl):
+    name = "svn"
+    dirname = ".svn"
+    repo_name = "checkout"
+    schemes = ("svn+ssh", "svn+http", "svn+https", "svn+svn", "svn+file")
+
+    @classmethod
+    def should_add_vcs_url_prefix(cls, remote_url: str) -> bool:
+        return True
+
+    @staticmethod
+    def get_base_rev_args(rev: str) -> List[str]:
+        return ["-r", rev]
+
+    @classmethod
+    def get_revision(cls, location: str) -> str:
+        """
+        Return the maximum revision for all files under a given location
+        """
+        # Note: taken from setuptools.command.egg_info
+        revision = 0
+
+        for base, dirs, _ in os.walk(location):
+            if cls.dirname not in dirs:
+                dirs[:] = []
+                continue  # no sense walking uncontrolled subdirs
+            dirs.remove(cls.dirname)
+            entries_fn = os.path.join(base, cls.dirname, "entries")
+            if not os.path.exists(entries_fn):
+                # FIXME: should we warn?
+                continue
+
+            dirurl, localrev = cls._get_svn_url_rev(base)
+
+            if base == location:
+                assert dirurl is not None
+                base = dirurl + "/"  # save the root url
+            elif not dirurl or not dirurl.startswith(base):
+                dirs[:] = []
+                continue  # not part of the same svn tree, skip it
+            revision = max(revision, localrev)
+        return str(revision)
+
+    @classmethod
+    def get_netloc_and_auth(
+        cls, netloc: str, scheme: str
+    ) -> Tuple[str, Tuple[Optional[str], Optional[str]]]:
+        """
+        This override allows the auth information to be passed to svn via the
+        --username and --password options instead of via the URL.
+        """
+        if scheme == "ssh":
+            # The --username and --password options can't be used for
+            # svn+ssh URLs, so keep the auth information in the URL.
+            return super().get_netloc_and_auth(netloc, scheme)
+
+        return split_auth_from_netloc(netloc)
+
+    @classmethod
+    def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]:
+        # Hotfix the URL scheme: after stripping "svn+" from "svn+ssh://", re-add it.
+        url, rev, user_pass = super().get_url_rev_and_auth(url)
+        if url.startswith("ssh://"):
+            url = "svn+" + url
+        return url, rev, user_pass
+
+    @staticmethod
+    def make_rev_args(
+        username: Optional[str], password: Optional[HiddenText]
+    ) -> CommandArgs:
+        extra_args: CommandArgs = []
+        if username:
+            extra_args += ["--username", username]
+        if password:
+            extra_args += ["--password", password]
+
+        return extra_args
+
+    @classmethod
+    def get_remote_url(cls, location: str) -> str:
+        # In cases where the source is in a subdirectory, walk up from the
+        # given location until we find a valid project root.
+        orig_location = location
+        while not is_installable_dir(location):
+            last_location = location
+            location = os.path.dirname(location)
+            if location == last_location:
+                # We've traversed up to the root of the filesystem without
+                # finding a Python project.
+                logger.warning(
+                    "Could not find Python project for directory %s (tried all "
+                    "parent directories)",
+                    orig_location,
+                )
+                raise RemoteNotFoundError
+
+        url, _rev = cls._get_svn_url_rev(location)
+        if url is None:
+            raise RemoteNotFoundError
+
+        return url
+
+    @classmethod
+    def _get_svn_url_rev(cls, location: str) -> Tuple[Optional[str], int]:
+        from pip._internal.exceptions import InstallationError
+
+        entries_path = os.path.join(location, cls.dirname, "entries")
+        if os.path.exists(entries_path):
+            with open(entries_path) as f:
+                data = f.read()
+        else:  # subversion >= 1.7 does not have the 'entries' file
+            data = ""
+
+        url = None
+        if data.startswith("8") or data.startswith("9") or data.startswith("10"):
+            entries = list(map(str.splitlines, data.split("\n\x0c\n")))
+            del entries[0][0]  # get rid of the '8'
+            url = entries[0][3]
+            revs = [int(d[9]) for d in entries if len(d) > 9 and d[9]] + [0]
+        elif data.startswith("<?xml"):
+            match = _svn_xml_url_re.search(data)
+            if not match:
+                raise ValueError(f"Badly formatted data: {data!r}")
+            url = match.group(1)  # get repository URL
+            revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0]
+        else:
+            try:
+                # subversion >= 1.7
+                # Note that using get_remote_call_options is not necessary here
+                # because `svn info` is being run against a local directory.
+                # We don't need to worry about making sure interactive mode
+                # is being used to prompt for passwords, because passwords
+                # are only potentially needed for remote server requests.
+                xml = cls.run_command(
+                    ["info", "--xml", location],
+                    show_stdout=False,
+                    stdout_only=True,
+                )
+                match = _svn_info_xml_url_re.search(xml)
+                assert match is not None
+                url = match.group(1)
+                revs = [int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)]
+            except InstallationError:
+                url, revs = None, []
+
+        if revs:
+            rev = max(revs)
+        else:
+            rev = 0
+
+        return url, rev
+
+    @classmethod
+    def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool:
+        """Always assume the versions don't match"""
+        return False
+
+    def __init__(self, use_interactive: Optional[bool] = None) -> None:
+        if use_interactive is None:
+            use_interactive = is_console_interactive()
+        self.use_interactive = use_interactive
+
+        # This member is used to cache the fetched version of the current
+        # ``svn`` client.
+        # Special value definitions:
+        #   None: Not evaluated yet.
+        #   Empty tuple: Could not parse version.
+        self._vcs_version: Optional[Tuple[int, ...]] = None
+
+        super().__init__()
+
+    def call_vcs_version(self) -> Tuple[int, ...]:
+        """Query the version of the currently installed Subversion client.
+
+        :return: A tuple containing the parts of the version information or
+            ``()`` if the version returned from ``svn`` could not be parsed.
+        :raises: BadCommand: If ``svn`` is not installed.
+        """
+        # Example versions:
+        #   svn, version 1.10.3 (r1842928)
+        #      compiled Feb 25 2019, 14:20:39 on x86_64-apple-darwin17.0.0
+        #   svn, version 1.7.14 (r1542130)
+        #      compiled Mar 28 2018, 08:49:13 on x86_64-pc-linux-gnu
+        #   svn, version 1.12.0-SlikSvn (SlikSvn/1.12.0)
+        #      compiled May 28 2019, 13:44:56 on x86_64-microsoft-windows6.2
+        version_prefix = "svn, version "
+        version = self.run_command(["--version"], show_stdout=False, stdout_only=True)
+        if not version.startswith(version_prefix):
+            return ()
+
+        version = version[len(version_prefix) :].split()[0]
+        version_list = version.partition("-")[0].split(".")
+        try:
+            parsed_version = tuple(map(int, version_list))
+        except ValueError:
+            return ()
+
+        return parsed_version
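+
+    # Illustrative sketch (editor's note, not part of upstream pip): for the
+    # first example banner above,
+    #
+    #     "svn, version 1.10.3 (r1842928)"  ->  "1.10.3"  ->  (1, 10, 3)
+    #
+    # and an unparseable banner yields ().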
+
+    def get_vcs_version(self) -> Tuple[int, ...]:
+        """Return the version of the currently installed Subversion client.
+
+        If the version of the Subversion client has already been queried,
+        a cached value will be used.
+
+        :return: A tuple containing the parts of the version information or
+            ``()`` if the version returned from ``svn`` could not be parsed.
+        :raises: BadCommand: If ``svn`` is not installed.
+        """
+        if self._vcs_version is not None:
+            # Use cached version, if available.
+            # If parsing the version failed previously (empty tuple),
+            # do not attempt to parse it again.
+            return self._vcs_version
+
+        vcs_version = self.call_vcs_version()
+        self._vcs_version = vcs_version
+        return vcs_version
+
+    def get_remote_call_options(self) -> CommandArgs:
+        """Return options to be used on calls to Subversion that contact the server.
+
+        These options are applicable for the following ``svn`` subcommands used
+        in this class.
+
+            - checkout
+            - switch
+            - update
+
+        :return: A list of command line arguments to pass to ``svn``.
+        """
+        if not self.use_interactive:
+            # --non-interactive switch is available since Subversion 0.14.4.
+            # Subversion < 1.8 runs in interactive mode by default.
+            return ["--non-interactive"]
+
+        svn_version = self.get_vcs_version()
+        # By default, Subversion >= 1.8 runs in non-interactive mode if
+        # stdin is not a TTY. Since that is how pip invokes SVN, in
+        # call_subprocess(), pip must pass --force-interactive to ensure
+        # the user can be prompted for a password, if required.
+        #   SVN added the --force-interactive option in SVN 1.8. Since
+        # e.g. RHEL/CentOS 7, which is supported until 2024, ships with
+        # SVN 1.7, pip should continue to support SVN 1.7. Therefore, pip
+        # can't safely add the option if the SVN version is < 1.8 (or unknown).
+        if svn_version >= (1, 8):
+            return ["--force-interactive"]
+
+        return []
+
+    def fetch_new(
+        self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int
+    ) -> None:
+        rev_display = rev_options.to_display()
+        logger.info(
+            "Checking out %s%s to %s",
+            url,
+            rev_display,
+            display_path(dest),
+        )
+        if verbosity <= 0:
+            flag = "--quiet"
+        else:
+            flag = ""
+        cmd_args = make_command(
+            "checkout",
+            flag,
+            self.get_remote_call_options(),
+            rev_options.to_args(),
+            url,
+            dest,
+        )
+        self.run_command(cmd_args)
+
+    def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
+        cmd_args = make_command(
+            "switch",
+            self.get_remote_call_options(),
+            rev_options.to_args(),
+            url,
+            dest,
+        )
+        self.run_command(cmd_args)
+
+    def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
+        cmd_args = make_command(
+            "update",
+            self.get_remote_call_options(),
+            rev_options.to_args(),
+            dest,
+        )
+        self.run_command(cmd_args)
+
+
+vcs.register(Subversion)
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/vcs/versioncontrol.py b/venv/lib/python3.9/site-packages/pip/_internal/vcs/versioncontrol.py
new file mode 100644
index 0000000..02bbf68
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/vcs/versioncontrol.py
@@ -0,0 +1,705 @@
+"""Handles all VCS (version control) support"""
+
+import logging
+import os
+import shutil
+import sys
+import urllib.parse
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Dict,
+    Iterable,
+    Iterator,
+    List,
+    Mapping,
+    Optional,
+    Tuple,
+    Type,
+    Union,
+)
+
+from pip._internal.cli.spinners import SpinnerInterface
+from pip._internal.exceptions import BadCommand, InstallationError
+from pip._internal.utils.misc import (
+    HiddenText,
+    ask_path_exists,
+    backup_dir,
+    display_path,
+    hide_url,
+    hide_value,
+    is_installable_dir,
+    rmtree,
+)
+from pip._internal.utils.subprocess import (
+    CommandArgs,
+    call_subprocess,
+    format_command_args,
+    make_command,
+)
+from pip._internal.utils.urls import get_url_scheme
+
+if TYPE_CHECKING:
+    # Literal was introduced in Python 3.8.
+    #
+    # TODO: Remove `if TYPE_CHECKING` when dropping support for Python 3.7.
+    from typing import Literal
+
+
+__all__ = ["vcs"]
+
+
+logger = logging.getLogger(__name__)
+
+AuthInfo = Tuple[Optional[str], Optional[str]]
+
+
+def is_url(name: str) -> bool:
+    """
+    Return true if the name looks like a URL.
+    """
+    scheme = get_url_scheme(name)
+    if scheme is None:
+        return False
+    return scheme in ["http", "https", "file", "ftp"] + vcs.all_schemes
+
+
+def make_vcs_requirement_url(
+    repo_url: str, rev: str, project_name: str, subdir: Optional[str] = None
+) -> str:
+    """
+    Return the URL for a VCS requirement.
+
+    Args:
+      repo_url: the remote VCS url, with any needed VCS prefix (e.g. "git+").
+      project_name: the (unescaped) project name.
+    """
+    egg_project_name = project_name.replace("-", "_")
+    req = f"{repo_url}@{rev}#egg={egg_project_name}"
+    if subdir:
+        req += f"&subdirectory={subdir}"
+
+    return req
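+
+# Illustrative sketch (editor's note, not part of upstream pip):
+#
+#     >>> make_vcs_requirement_url(
+#     ...     "git+https://example.com/my-proj.git", "abc123", "my-proj",
+#     ...     subdir="pkg",
+#     ... )
+#     'git+https://example.com/my-proj.git@abc123#egg=my_proj&subdirectory=pkg'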
+
+
+def find_path_to_project_root_from_repo_root(
+    location: str, repo_root: str
+) -> Optional[str]:
+    """
+    Find the Python project's root by searching up the filesystem from
+    `location`. Return the path to project root relative to `repo_root`.
+    Return None if the project root is `repo_root`, or cannot be found.
+    """
+    # find project root.
+    orig_location = location
+    while not is_installable_dir(location):
+        last_location = location
+        location = os.path.dirname(location)
+        if location == last_location:
+            # We've traversed up to the root of the filesystem without
+            # finding a Python project.
+            logger.warning(
+                "Could not find a Python project for directory %s (tried all "
+                "parent directories)",
+                orig_location,
+            )
+            return None
+
+    if os.path.samefile(repo_root, location):
+        return None
+
+    return os.path.relpath(location, repo_root)
+
+
+class RemoteNotFoundError(Exception):
+    pass
+
+
+class RemoteNotValidError(Exception):
+    def __init__(self, url: str):
+        super().__init__(url)
+        self.url = url
+
+
+class RevOptions:
+
+    """
+    Encapsulates a VCS-specific revision to install, along with any VCS
+    install options.
+
+    Instances of this class should be treated as if immutable.
+    """
+
+    def __init__(
+        self,
+        vc_class: Type["VersionControl"],
+        rev: Optional[str] = None,
+        extra_args: Optional[CommandArgs] = None,
+    ) -> None:
+        """
+        Args:
+          vc_class: a VersionControl subclass.
+          rev: the name of the revision to install.
+          extra_args: a list of extra options.
+        """
+        if extra_args is None:
+            extra_args = []
+
+        self.extra_args = extra_args
+        self.rev = rev
+        self.vc_class = vc_class
+        self.branch_name: Optional[str] = None
+
+    def __repr__(self) -> str:
+        return f"<RevOptions {self.vc_class.name}: rev={self.rev!r}>"
+
+    @property
+    def arg_rev(self) -> Optional[str]:
+        if self.rev is None:
+            return self.vc_class.default_arg_rev
+
+        return self.rev
+
+    def to_args(self) -> CommandArgs:
+        """
+        Return the VCS-specific command arguments.
+        """
+        args: CommandArgs = []
+        rev = self.arg_rev
+        if rev is not None:
+            args += self.vc_class.get_base_rev_args(rev)
+        args += self.extra_args
+
+        return args
+
+    def to_display(self) -> str:
+        if not self.rev:
+            return ""
+
+        return f" (to revision {self.rev})"
+
+    def make_new(self, rev: str) -> "RevOptions":
+        """
+        Make a copy of the current instance, but with a new rev.
+
+        Args:
+          rev: the name of the revision for the new object.
+        """
+        return self.vc_class.make_rev_options(rev, extra_args=self.extra_args)
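+
+    # Illustrative usage sketch (editor's note, not part of upstream pip),
+    # assuming the Git backend from pip._internal.vcs.git:
+    #
+    #     >>> opts = Git.make_rev_options("v1.0")
+    #     >>> opts.to_args()
+    #     ['v1.0']
+    #     >>> opts.make_new("abc123").rev
+    #     'abc123'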
+
+
+class VcsSupport:
+    _registry: Dict[str, "VersionControl"] = {}
+    schemes = ["ssh", "git", "hg", "bzr", "sftp", "svn"]
+
+    def __init__(self) -> None:
+        # Register more schemes with urlparse for various version control
+        # systems
+        urllib.parse.uses_netloc.extend(self.schemes)
+        super().__init__()
+
+    def __iter__(self) -> Iterator[str]:
+        return self._registry.__iter__()
+
+    @property
+    def backends(self) -> List["VersionControl"]:
+        return list(self._registry.values())
+
+    @property
+    def dirnames(self) -> List[str]:
+        return [backend.dirname for backend in self.backends]
+
+    @property
+    def all_schemes(self) -> List[str]:
+        schemes: List[str] = []
+        for backend in self.backends:
+            schemes.extend(backend.schemes)
+        return schemes
+
+    def register(self, cls: Type["VersionControl"]) -> None:
+        if not hasattr(cls, "name"):
+            logger.warning("Cannot register VCS %s", cls.__name__)
+            return
+        if cls.name not in self._registry:
+            self._registry[cls.name] = cls()
+            logger.debug("Registered VCS backend: %s", cls.name)
+
+    def unregister(self, name: str) -> None:
+        if name in self._registry:
+            del self._registry[name]
+
+    def get_backend_for_dir(self, location: str) -> Optional["VersionControl"]:
+        """
+        Return a VersionControl object if a repository of that type is found
+        at the given directory.
+        """
+        vcs_backends = {}
+        for vcs_backend in self._registry.values():
+            repo_path = vcs_backend.get_repository_root(location)
+            if not repo_path:
+                continue
+            logger.debug("Determine that %s uses VCS: %s", location, vcs_backend.name)
+            vcs_backends[repo_path] = vcs_backend
+
+        if not vcs_backends:
+            return None
+
+        # Choose the VCS in the inner-most directory. Since all repository
+        # roots found here would be either `location` or one of its
+        # parents, the longest path should have the most path components,
+        # i.e. the backend representing the inner-most repository.
+        inner_most_repo_path = max(vcs_backends, key=len)
+        return vcs_backends[inner_most_repo_path]
+
+    def get_backend_for_scheme(self, scheme: str) -> Optional["VersionControl"]:
+        """
+        Return a VersionControl object or None.
+        """
+        for vcs_backend in self._registry.values():
+            if scheme in vcs_backend.schemes:
+                return vcs_backend
+        return None
+
+    def get_backend(self, name: str) -> Optional["VersionControl"]:
+        """
+        Return a VersionControl object or None.
+        """
+        name = name.lower()
+        return self._registry.get(name)
+
+
+vcs = VcsSupport()
+
+
+class VersionControl:
+    name = ""
+    dirname = ""
+    repo_name = ""
+    # List of supported schemes for this Version Control
+    schemes: Tuple[str, ...] = ()
+    # Iterable of environment variable names to unset via call_subprocess().
+    unset_environ: Tuple[str, ...] = ()
+    default_arg_rev: Optional[str] = None
+
+    @classmethod
+    def should_add_vcs_url_prefix(cls, remote_url: str) -> bool:
+        """
+        Return whether the vcs prefix (e.g. "git+") should be added to a
+        repository's remote url when used in a requirement.
+        """
+        return not remote_url.lower().startswith(f"{cls.name}:")
+
+    @classmethod
+    def get_subdirectory(cls, location: str) -> Optional[str]:
+        """
+        Return the path to Python project root, relative to the repo root.
+        Return None if the project root is in the repo root.
+        """
+        return None
+
+    @classmethod
+    def get_requirement_revision(cls, repo_dir: str) -> str:
+        """
+        Return the revision string that should be used in a requirement.
+        """
+        return cls.get_revision(repo_dir)
+
+    @classmethod
+    def get_src_requirement(cls, repo_dir: str, project_name: str) -> str:
+        """
+        Return the requirement string to use to redownload the files
+        currently at the given repository directory.
+
+        Args:
+          project_name: the (unescaped) project name.
+
+        The return value has a form similar to the following:
+
+            {repository_url}@{revision}#egg={project_name}
+        """
+        repo_url = cls.get_remote_url(repo_dir)
+
+        if cls.should_add_vcs_url_prefix(repo_url):
+            repo_url = f"{cls.name}+{repo_url}"
+
+        revision = cls.get_requirement_revision(repo_dir)
+        subdir = cls.get_subdirectory(repo_dir)
+        req = make_vcs_requirement_url(repo_url, revision, project_name, subdir=subdir)
+
+        return req
+
+    @staticmethod
+    def get_base_rev_args(rev: str) -> List[str]:
+        """
+        Return the base revision arguments for a vcs command.
+
+        Args:
+          rev: the name of a revision to install.  Cannot be None.
+        """
+        raise NotImplementedError
+
+    def is_immutable_rev_checkout(self, url: str, dest: str) -> bool:
+        """
+        Return true if the commit hash checked out at dest matches
+        the revision in url.
+
+        Always return False if the VCS does not support immutable commit
+        hashes.
+
+        This method does not check if there are local uncommitted changes
+        in dest after checkout, as pip currently has no use case for that.
+        """
+        return False
+
+    @classmethod
+    def make_rev_options(
+        cls, rev: Optional[str] = None, extra_args: Optional[CommandArgs] = None
+    ) -> RevOptions:
+        """
+        Return a RevOptions object.
+
+        Args:
+          rev: the name of a revision to install.
+          extra_args: a list of extra options.
+        """
+        return RevOptions(cls, rev, extra_args=extra_args)
+
+    @classmethod
+    def _is_local_repository(cls, repo: str) -> bool:
+        """
+        posix absolute paths start with os.path.sep,
+        win32 ones start with drive (like c:\\folder)
+        """
+        drive, tail = os.path.splitdrive(repo)
+        return repo.startswith(os.path.sep) or bool(drive)
+
+    @classmethod
+    def get_netloc_and_auth(
+        cls, netloc: str, scheme: str
+    ) -> Tuple[str, Tuple[Optional[str], Optional[str]]]:
+        """
+        Parse the repository URL's netloc, and return the new netloc to use
+        along with auth information.
+
+        Args:
+          netloc: the original repository URL netloc.
+          scheme: the repository URL's scheme without the vcs prefix.
+
+        This is mainly for the Subversion class to override, so that auth
+        information can be provided via the --username and --password options
+        instead of through the URL.  For other subclasses like Git without
+        such an option, auth information must stay in the URL.
+
+        Returns: (netloc, (username, password)).
+        """
+        return netloc, (None, None)
+
+    @classmethod
+    def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]:
+        """
+        Parse the repository URL to use, and return the URL, revision,
+        and auth info to use.
+
+        Returns: (url, rev, (username, password)).
+        """
+        scheme, netloc, path, query, frag = urllib.parse.urlsplit(url)
+        if "+" not in scheme:
+            raise ValueError(
+                "Sorry, {!r} is a malformed VCS url. "
+                "The format is <vcs>+<protocol>://<url>, "
+                "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp".format(url)
+            )
+        # Remove the vcs prefix.
+        scheme = scheme.split("+", 1)[1]
+        netloc, user_pass = cls.get_netloc_and_auth(netloc, scheme)
+        rev = None
+        if "@" in path:
+            path, rev = path.rsplit("@", 1)
+            if not rev:
+                raise InstallationError(
+                    "The URL {!r} has an empty revision (after @) "
+                    "which is not supported. Include a revision after @ "
+                    "or remove @ from the URL.".format(url)
+                )
+        url = urllib.parse.urlunsplit((scheme, netloc, path, query, ""))
+        return url, rev, user_pass
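+
+    # Illustrative sketch (editor's note, not part of upstream pip):
+    #
+    #     >>> VersionControl.get_url_rev_and_auth(
+    #     ...     "git+https://example.com/repo.git@v1.0"
+    #     ... )
+    #     ('https://example.com/repo.git', 'v1.0', (None, None))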
+
+    @staticmethod
+    def make_rev_args(
+        username: Optional[str], password: Optional[HiddenText]
+    ) -> CommandArgs:
+        """
+        Return the RevOptions "extra arguments" to use in obtain().
+        """
+        return []
+
+    def get_url_rev_options(self, url: HiddenText) -> Tuple[HiddenText, RevOptions]:
+        """
+        Return the URL and RevOptions object to use in obtain(),
+        as a tuple (url, rev_options).
+        """
+        secret_url, rev, user_pass = self.get_url_rev_and_auth(url.secret)
+        username, secret_password = user_pass
+        password: Optional[HiddenText] = None
+        if secret_password is not None:
+            password = hide_value(secret_password)
+        extra_args = self.make_rev_args(username, password)
+        rev_options = self.make_rev_options(rev, extra_args=extra_args)
+
+        return hide_url(secret_url), rev_options
+
+    @staticmethod
+    def normalize_url(url: str) -> str:
+        """
+        Normalize a URL for comparison by unquoting it and removing any
+        trailing slash.
+        """
+        return urllib.parse.unquote(url).rstrip("/")
+
+    @classmethod
+    def compare_urls(cls, url1: str, url2: str) -> bool:
+        """
+        Compare two repo URLs for identity, ignoring incidental differences.
+        """
+        return cls.normalize_url(url1) == cls.normalize_url(url2)
+
+    def fetch_new(
+        self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int
+    ) -> None:
+        """
+        Fetch a revision from a repository, in the case that this is the
+        first fetch from the repository.
+
+        Args:
+          dest: the directory to fetch the repository to.
+          rev_options: a RevOptions object.
+          verbosity: verbosity level.
+        """
+        raise NotImplementedError
+
+    def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
+        """
+        Switch the repo at ``dest`` to point to ``url``.
+
+        Args:
+          rev_options: a RevOptions object.
+        """
+        raise NotImplementedError
+
+    def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
+        """
+        Update an already-existing repo to the given ``rev_options``.
+
+        Args:
+          rev_options: a RevOptions object.
+        """
+        raise NotImplementedError
+
+    @classmethod
+    def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool:
+        """
+        Return whether the id of the current commit equals the given name.
+
+        Args:
+          dest: the repository directory.
+          name: a string name.
+        """
+        raise NotImplementedError
+
+    def obtain(self, dest: str, url: HiddenText, verbosity: int) -> None:
+        """
+        Install or update, in editable mode, the package represented by this
+        VersionControl object.
+
+        :param dest: the repository directory in which to install or update.
+        :param url: the repository URL starting with a vcs prefix.
+        :param verbosity: verbosity level.
+        """
+        url, rev_options = self.get_url_rev_options(url)
+
+        if not os.path.exists(dest):
+            self.fetch_new(dest, url, rev_options, verbosity=verbosity)
+            return
+
+        rev_display = rev_options.to_display()
+        if self.is_repository_directory(dest):
+            existing_url = self.get_remote_url(dest)
+            if self.compare_urls(existing_url, url.secret):
+                logger.debug(
+                    "%s in %s exists, and has correct URL (%s)",
+                    self.repo_name.title(),
+                    display_path(dest),
+                    url,
+                )
+                if not self.is_commit_id_equal(dest, rev_options.rev):
+                    logger.info(
+                        "Updating %s %s%s",
+                        display_path(dest),
+                        self.repo_name,
+                        rev_display,
+                    )
+                    self.update(dest, url, rev_options)
+                else:
+                    logger.info("Skipping because already up-to-date.")
+                return
+
+            logger.warning(
+                "%s %s in %s exists with URL %s",
+                self.name,
+                self.repo_name,
+                display_path(dest),
+                existing_url,
+            )
+            prompt = ("(s)witch, (i)gnore, (w)ipe, (b)ackup ", ("s", "i", "w", "b"))
+        else:
+            logger.warning(
+                "Directory %s already exists, and is not a %s %s.",
+                dest,
+                self.name,
+                self.repo_name,
+            )
+            # https://github.com/python/mypy/issues/1174
+            prompt = ("(i)gnore, (w)ipe, (b)ackup ", ("i", "w", "b"))  # type: ignore
+
+        logger.warning(
+            "The plan is to install the %s repository %s",
+            self.name,
+            url,
+        )
+        response = ask_path_exists("What to do?  {}".format(prompt[0]), prompt[1])
+
+        if response == "a":
+            sys.exit(-1)
+
+        if response == "w":
+            logger.warning("Deleting %s", display_path(dest))
+            rmtree(dest)
+            self.fetch_new(dest, url, rev_options, verbosity=verbosity)
+            return
+
+        if response == "b":
+            dest_dir = backup_dir(dest)
+            logger.warning("Backing up %s to %s", display_path(dest), dest_dir)
+            shutil.move(dest, dest_dir)
+            self.fetch_new(dest, url, rev_options, verbosity=verbosity)
+            return
+
+        # Do nothing if the response is "i".
+        if response == "s":
+            logger.info(
+                "Switching %s %s to %s%s",
+                self.repo_name,
+                display_path(dest),
+                url,
+                rev_display,
+            )
+            self.switch(dest, url, rev_options)
+
+    def unpack(self, location: str, url: HiddenText, verbosity: int) -> None:
+        """
+        Clean up the current location and download the repository at ``url``
+        (including its VCS metadata) into ``location``.
+
+        :param url: the repository URL starting with a vcs prefix.
+        :param verbosity: verbosity level.
+        """
+        if os.path.exists(location):
+            rmtree(location)
+        self.obtain(location, url=url, verbosity=verbosity)
+
+    @classmethod
+    def get_remote_url(cls, location: str) -> str:
+        """
+        Return the url used at location
+
+        Raises RemoteNotFoundError if the repository does not have a remote
+        url configured.
+        """
+        raise NotImplementedError
+
+    @classmethod
+    def get_revision(cls, location: str) -> str:
+        """
+        Return the current commit id of the files at the given location.
+        """
+        raise NotImplementedError
+
+    @classmethod
+    def run_command(
+        cls,
+        cmd: Union[List[str], CommandArgs],
+        show_stdout: bool = True,
+        cwd: Optional[str] = None,
+        on_returncode: 'Literal["raise", "warn", "ignore"]' = "raise",
+        extra_ok_returncodes: Optional[Iterable[int]] = None,
+        command_desc: Optional[str] = None,
+        extra_environ: Optional[Mapping[str, Any]] = None,
+        spinner: Optional[SpinnerInterface] = None,
+        log_failed_cmd: bool = True,
+        stdout_only: bool = False,
+    ) -> str:
+        """
+        Run a VCS subcommand.
+
+        This is simply a wrapper around call_subprocess that adds the VCS
+        command name and checks that the VCS is available.
+        """
+        cmd = make_command(cls.name, *cmd)
+        if command_desc is None:
+            command_desc = format_command_args(cmd)
+        try:
+            return call_subprocess(
+                cmd,
+                show_stdout,
+                cwd,
+                on_returncode=on_returncode,
+                extra_ok_returncodes=extra_ok_returncodes,
+                command_desc=command_desc,
+                extra_environ=extra_environ,
+                unset_environ=cls.unset_environ,
+                spinner=spinner,
+                log_failed_cmd=log_failed_cmd,
+                stdout_only=stdout_only,
+            )
+        except FileNotFoundError:
+            # errno.ENOENT = no such file or directory
+            # In other words, the VCS executable isn't available
+            raise BadCommand(
+                f"Cannot find command {cls.name!r} - do you have "
+                f"{cls.name!r} installed and in your PATH?"
+            )
+        except PermissionError:
+            # errno.EACCES = Permission denied
+            # This error occurs, for instance, when the command is installed
+            # only for another user. So the current user doesn't have
+            # permission to call the other user's command.
+            raise BadCommand(
+                f"No permission to execute {cls.name!r} - install it "
+                f"locally, globally (ask admin), or check your PATH. "
+                f"See possible solutions at "
+                f"https://pip.pypa.io/en/latest/reference/pip_freeze/"
+                f"#fixing-permission-denied."
+            )
+
+    @classmethod
+    def is_repository_directory(cls, path: str) -> bool:
+        """
+        Return whether a directory path is a repository directory.
+        """
+        logger.debug("Checking in %s for %s (%s)...", path, cls.dirname, cls.name)
+        return os.path.exists(os.path.join(path, cls.dirname))
+
+    @classmethod
+    def get_repository_root(cls, location: str) -> Optional[str]:
+        """
+        Return the "root" (top-level) directory controlled by the vcs,
+        or `None` if the directory is not in any.
+
+        It is meant to be overridden to implement smarter detection
+        mechanisms for specific vcs.
+
+        This can do more than is_repository_directory() alone. For
+        example, the Git override checks that Git is actually available.
+        """
+        if cls.is_repository_directory(location):
+            return location
+        return None
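+
+        # Worked example of the fallback above: for a checkout at /src/proj
+        # containing a ".git" directory (when cls.dirname == ".git"),
+        # is_repository_directory("/src/proj") is True, so this base
+        # implementation returns "/src/proj" itself.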
diff --git a/venv/lib/python3.9/site-packages/pip/_internal/wheel_builder.py b/venv/lib/python3.9/site-packages/pip/_internal/wheel_builder.py
new file mode 100644
index 0000000..15b30af
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_internal/wheel_builder.py
@@ -0,0 +1,382 @@
+"""Orchestrator for building wheels from InstallRequirements.
+"""
+
+import logging
+import os.path
+import re
+import shutil
+from typing import Callable, Iterable, List, Optional, Tuple
+
+from pip._vendor.packaging.utils import canonicalize_name, canonicalize_version
+from pip._vendor.packaging.version import InvalidVersion, Version
+
+from pip._internal.cache import WheelCache
+from pip._internal.exceptions import InvalidWheelFilename, UnsupportedWheel
+from pip._internal.metadata import FilesystemWheel, get_wheel_distribution
+from pip._internal.models.link import Link
+from pip._internal.models.wheel import Wheel
+from pip._internal.operations.build.wheel import build_wheel_pep517
+from pip._internal.operations.build.wheel_editable import build_wheel_editable
+from pip._internal.operations.build.wheel_legacy import build_wheel_legacy
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.utils.deprecation import (
+    LegacyInstallReasonMissingWheelPackage,
+    LegacyInstallReasonNoBinaryForcesSetuptoolsInstall,
+)
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import ensure_dir, hash_file, is_wheel_installed
+from pip._internal.utils.setuptools_build import make_setuptools_clean_args
+from pip._internal.utils.subprocess import call_subprocess
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.urls import path_to_url
+from pip._internal.vcs import vcs
+
+logger = logging.getLogger(__name__)
+
+_egg_info_re = re.compile(r"([a-z0-9_.]+)-([a-z0-9_.!+-]+)", re.IGNORECASE)
+
+BdistWheelAllowedPredicate = Callable[[InstallRequirement], bool]
+BuildResult = Tuple[List[InstallRequirement], List[InstallRequirement]]
+
+
+def _contains_egg_info(s: str) -> bool:
+    """Determine whether the string looks like an egg_info.
+
+    :param s: The string to parse. E.g. foo-2.1
+    """
+    return bool(_egg_info_re.search(s))
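+    # Doctest-style examples of the regex above (assumed inputs):
+    #
+    #     >>> _contains_egg_info("pip-22.3.1")
+    #     True
+    #     >>> _contains_egg_info("main")
+    #     False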
+
+
+def _should_build(
+    req: InstallRequirement,
+    need_wheel: bool,
+    check_bdist_wheel: Optional[BdistWheelAllowedPredicate] = None,
+) -> bool:
+    """Return whether an InstallRequirement should be built into a wheel."""
+    if req.constraint:
+        # never build requirements that are merely constraints
+        return False
+    if req.is_wheel:
+        if need_wheel:
+            logger.info(
+                "Skipping %s, due to already being wheel.",
+                req.name,
+            )
+        return False
+
+    if need_wheel:
+        # i.e. pip wheel, not pip install
+        return True
+
+    # From this point, this concerns the pip install command only
+    # (need_wheel=False).
+
+    if not req.source_dir:
+        return False
+
+    if req.editable:
+        # we only build PEP 660 editable requirements
+        return req.supports_pyproject_editable()
+
+    if req.use_pep517:
+        return True
+
+    assert check_bdist_wheel is not None
+    if not check_bdist_wheel(req):
+        # /!\ When we change this to unconditionally return True, we must also remove
+        # support for `--install-option`. Indeed, `--install-option` implies
+        # `--no-binary` so we can return False here and run `setup.py install`.
+        # `--global-option` and `--build-option` can remain until we drop support for
+        # building with `setup.py bdist_wheel`.
+        req.legacy_install_reason = LegacyInstallReasonNoBinaryForcesSetuptoolsInstall
+        return False
+
+    if not is_wheel_installed():
+        # we don't build legacy requirements if wheel is not installed
+        req.legacy_install_reason = LegacyInstallReasonMissingWheelPackage
+        return False
+
+    return True
+
+
+def should_build_for_wheel_command(
+    req: InstallRequirement,
+) -> bool:
+    return _should_build(req, need_wheel=True)
+
+
+def should_build_for_install_command(
+    req: InstallRequirement,
+    check_bdist_wheel_allowed: BdistWheelAllowedPredicate,
+) -> bool:
+    return _should_build(
+        req, need_wheel=False, check_bdist_wheel=check_bdist_wheel_allowed
+    )
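+
+
+# A hedged summary of the _should_build policy above: constraints and
+# existing wheels are never built; `pip wheel` builds everything else; for
+# `pip install`, editables build only with PEP 660 support, PEP 517 projects
+# always build, and legacy projects build only when bdist_wheel is allowed
+# and the `wheel` package is installed.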
+
+
+def _should_cache(
+    req: InstallRequirement,
+) -> Optional[bool]:
+    """
+    Return whether a built InstallRequirement can be stored in the persistent
+    wheel cache, assuming the wheel cache is available, and _should_build()
+    has determined a wheel needs to be built.
+    """
+    if req.editable or not req.source_dir:
+        # never cache editable requirements
+        return False
+
+    if req.link and req.link.is_vcs:
+        # VCS checkout. Do not cache
+        # unless it points to an immutable commit hash.
+        assert not req.editable
+        assert req.source_dir
+        vcs_backend = vcs.get_backend_for_scheme(req.link.scheme)
+        assert vcs_backend
+        if vcs_backend.is_immutable_rev_checkout(req.link.url, req.source_dir):
+            return True
+        return False
+
+    assert req.link
+    base, ext = req.link.splitext()
+    if _contains_egg_info(base):
+        return True
+
+    # Otherwise, do not cache.
+    return False
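+    # Worked examples of the policy above (hypothetical links): a requirement
+    # pinned to an immutable VCS commit (e.g. git+https://...@<full sha>) is
+    # cached; a link ending in "pkg-1.0.tar.gz" contains the egg-info-like
+    # "pkg-1.0", so it is cached; editable requirements never are.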
+
+
+def _get_cache_dir(
+    req: InstallRequirement,
+    wheel_cache: WheelCache,
+) -> str:
+    """Return the persistent or temporary cache directory where the built
+    wheel needs to be stored.
+    """
+    cache_available = bool(wheel_cache.cache_dir)
+    assert req.link
+    if cache_available and _should_cache(req):
+        cache_dir = wheel_cache.get_path_for_link(req.link)
+    else:
+        cache_dir = wheel_cache.get_ephem_path_for_link(req.link)
+    return cache_dir
+
+
+def _verify_one(req: InstallRequirement, wheel_path: str) -> None:
+    canonical_name = canonicalize_name(req.name or "")
+    w = Wheel(os.path.basename(wheel_path))
+    if canonicalize_name(w.name) != canonical_name:
+        raise InvalidWheelFilename(
+            "Wheel has unexpected file name: expected {!r}, "
+            "got {!r}".format(canonical_name, w.name),
+        )
+    dist = get_wheel_distribution(FilesystemWheel(wheel_path), canonical_name)
+    dist_verstr = str(dist.version)
+    if canonicalize_version(dist_verstr) != canonicalize_version(w.version):
+        raise InvalidWheelFilename(
+            "Wheel has unexpected file name: expected {!r}, "
+            "got {!r}".format(dist_verstr, w.version),
+        )
+    metadata_version_value = dist.metadata_version
+    if metadata_version_value is None:
+        raise UnsupportedWheel("Missing Metadata-Version")
+    try:
+        metadata_version = Version(metadata_version_value)
+    except InvalidVersion:
+        msg = f"Invalid Metadata-Version: {metadata_version_value}"
+        raise UnsupportedWheel(msg)
+    if metadata_version >= Version("1.2") and not isinstance(dist.version, Version):
+        raise UnsupportedWheel(
+            "Metadata 1.2 mandates PEP 440 version, "
+            "but {!r} is not".format(dist_verstr)
+        )
+
+
+def _build_one(
+    req: InstallRequirement,
+    output_dir: str,
+    verify: bool,
+    build_options: List[str],
+    global_options: List[str],
+    editable: bool,
+) -> Optional[str]:
+    """Build one wheel.
+
+    :return: The filename of the built wheel, or None if the build failed.
+    """
+    artifact = "editable" if editable else "wheel"
+    try:
+        ensure_dir(output_dir)
+    except OSError as e:
+        logger.warning(
+            "Building %s for %s failed: %s",
+            artifact,
+            req.name,
+            e,
+        )
+        return None
+
+    # Install build deps into temporary directory (PEP 518)
+    with req.build_env:
+        wheel_path = _build_one_inside_env(
+            req, output_dir, build_options, global_options, editable
+        )
+    if wheel_path and verify:
+        try:
+            _verify_one(req, wheel_path)
+        except (InvalidWheelFilename, UnsupportedWheel) as e:
+            logger.warning("Built %s for %s is invalid: %s", artifact, req.name, e)
+            return None
+    return wheel_path
+
+
+def _build_one_inside_env(
+    req: InstallRequirement,
+    output_dir: str,
+    build_options: List[str],
+    global_options: List[str],
+    editable: bool,
+) -> Optional[str]:
+    with TempDirectory(kind="wheel") as temp_dir:
+        assert req.name
+        if req.use_pep517:
+            assert req.metadata_directory
+            assert req.pep517_backend
+            if global_options:
+                logger.warning(
+                    "Ignoring --global-option when building %s using PEP 517", req.name
+                )
+            if build_options:
+                logger.warning(
+                    "Ignoring --build-option when building %s using PEP 517", req.name
+                )
+            if editable:
+                wheel_path = build_wheel_editable(
+                    name=req.name,
+                    backend=req.pep517_backend,
+                    metadata_directory=req.metadata_directory,
+                    tempd=temp_dir.path,
+                )
+            else:
+                wheel_path = build_wheel_pep517(
+                    name=req.name,
+                    backend=req.pep517_backend,
+                    metadata_directory=req.metadata_directory,
+                    tempd=temp_dir.path,
+                )
+        else:
+            wheel_path = build_wheel_legacy(
+                name=req.name,
+                setup_py_path=req.setup_py_path,
+                source_dir=req.unpacked_source_directory,
+                global_options=global_options,
+                build_options=build_options,
+                tempd=temp_dir.path,
+            )
+
+        if wheel_path is not None:
+            wheel_name = os.path.basename(wheel_path)
+            dest_path = os.path.join(output_dir, wheel_name)
+            try:
+                wheel_hash, length = hash_file(wheel_path)
+                shutil.move(wheel_path, dest_path)
+                logger.info(
+                    "Created wheel for %s: filename=%s size=%d sha256=%s",
+                    req.name,
+                    wheel_name,
+                    length,
+                    wheel_hash.hexdigest(),
+                )
+                logger.info("Stored in directory: %s", output_dir)
+                return dest_path
+            except Exception as e:
+                logger.warning(
+                    "Building wheel for %s failed: %s",
+                    req.name,
+                    e,
+                )
+        # Ignore the return value; we can't do anything else useful.
+        if not req.use_pep517:
+            _clean_one_legacy(req, global_options)
+        return None
+
+
+def _clean_one_legacy(req: InstallRequirement, global_options: List[str]) -> bool:
+    clean_args = make_setuptools_clean_args(
+        req.setup_py_path,
+        global_options=global_options,
+    )
+
+    logger.info("Running setup.py clean for %s", req.name)
+    try:
+        call_subprocess(
+            clean_args, command_desc="python setup.py clean", cwd=req.source_dir
+        )
+        return True
+    except Exception:
+        logger.error("Failed cleaning build dir for %s", req.name)
+        return False
+
+
+def build(
+    requirements: Iterable[InstallRequirement],
+    wheel_cache: WheelCache,
+    verify: bool,
+    build_options: List[str],
+    global_options: List[str],
+) -> BuildResult:
+    """Build wheels.
+
+    :return: The list of InstallRequirement that succeeded to build and
+        the list of InstallRequirement that failed to build.
+    """
+    if not requirements:
+        return [], []
+
+    # Build the wheels.
+    logger.info(
+        "Building wheels for collected packages: %s",
+        ", ".join(req.name for req in requirements),  # type: ignore
+    )
+
+    with indent_log():
+        build_successes, build_failures = [], []
+        for req in requirements:
+            assert req.name
+            cache_dir = _get_cache_dir(req, wheel_cache)
+            wheel_file = _build_one(
+                req,
+                cache_dir,
+                verify,
+                build_options,
+                global_options,
+                req.editable and req.permit_editable_wheels,
+            )
+            if wheel_file:
+                # Record the download origin in the cache
+                if req.download_info is not None:
+                    # download_info is guaranteed to be set because when we build an
+                    # InstallRequirement it has been through the preparer before, but
+                    # let's be cautious.
+                    wheel_cache.record_download_origin(cache_dir, req.download_info)
+                # Update the link for this.
+                req.link = Link(path_to_url(wheel_file))
+                req.local_file_path = req.link.file_path
+                assert req.link.is_wheel
+                build_successes.append(req)
+            else:
+                build_failures.append(req)
+
+    # notify success/failure
+    if build_successes:
+        logger.info(
+            "Successfully built %s",
+            " ".join([req.name for req in build_successes]),  # type: ignore
+        )
+    if build_failures:
+        logger.info(
+            "Failed to build %s",
+            " ".join([req.name for req in build_failures]),  # type: ignore
+        )
+    # Return a list of requirements that failed to build
+    return build_successes, build_failures
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/__init__.py b/venv/lib/python3.9/site-packages/pip/_vendor/__init__.py
new file mode 100644
index 0000000..b22f7ab
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/__init__.py
@@ -0,0 +1,120 @@
+"""
+pip._vendor is for vendoring dependencies of pip so that pip does not need
+to depend on anything external.
+
+Files inside of pip._vendor should be considered immutable and should only be
+updated to versions from upstream.
+"""
+from __future__ import absolute_import
+
+import glob
+import os.path
+import sys
+
+# Downstream redistributors which have debundled our dependencies should also
+# patch this value to be true. This will trigger the additional patching
+# to make things like "six" importable under the pip._vendor namespace.
+DEBUNDLED = False
+
+# By default, look in this directory for a bunch of .whl files which we will
+# add to the beginning of sys.path before attempting to import anything. This
+# is done to support downstream re-distributors like Debian and Fedora who
+# wish to create their own Wheels for our dependencies to aid in debundling.
+WHEEL_DIR = os.path.abspath(os.path.dirname(__file__))
+
+
+# Define a small helper function to alias our vendored modules to the real ones
+# if the vendored ones do not exist. The idea for this was taken from
+# https://github.com/kennethreitz/requests/pull/2567.
+def vendored(modulename):
+    vendored_name = "{0}.{1}".format(__name__, modulename)
+
+    try:
+        __import__(modulename, globals(), locals(), level=0)
+    except ImportError:
+        # We can just silently allow import failures to pass here. If we
+        # got to this point it means that ``import pip._vendor.whatever``
+        # failed and so did ``import whatever``. Since we're importing this
+        # upfront in an attempt to alias imports, not erroring here will
+        # just mean we get a regular import error whenever pip *actually*
+        # tries to import one of these modules to use it, which actually
+        # gives us a better error message than we would have otherwise
+        # gotten.
+        pass
+    else:
+        sys.modules[vendored_name] = sys.modules[modulename]
+        base, head = vendored_name.rsplit(".", 1)
+        setattr(sys.modules[base], head, sys.modules[modulename])
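+
+
+# An illustrative trace of vendored("six") on a debundled system:
+# ``import six`` succeeds, so sys.modules["pip._vendor.six"] is pointed at
+# the real module and ``pip._vendor.six`` resolves from then on.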
+
+
+# If we're operating in a debundled setup, then we want to go ahead and trigger
+# the aliasing of our vendored libraries as well as looking for wheels to add
+# to our sys.path. This will cause all of this code to be a no-op typically
+# however downstream redistributors can enable it in a consistent way across
+# all platforms.
+if DEBUNDLED:
+    # Actually look inside of WHEEL_DIR to find .whl files and add them to the
+    # front of our sys.path.
+    sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path
+
+    # Actually alias all of our vendored dependencies.
+    vendored("cachecontrol")
+    vendored("certifi")
+    vendored("colorama")
+    vendored("distlib")
+    vendored("distro")
+    vendored("six")
+    vendored("six.moves")
+    vendored("six.moves.urllib")
+    vendored("six.moves.urllib.parse")
+    vendored("packaging")
+    vendored("packaging.version")
+    vendored("packaging.specifiers")
+    vendored("pep517")
+    vendored("pkg_resources")
+    vendored("platformdirs")
+    vendored("progress")
+    vendored("requests")
+    vendored("requests.exceptions")
+    vendored("requests.packages")
+    vendored("requests.packages.urllib3")
+    vendored("requests.packages.urllib3._collections")
+    vendored("requests.packages.urllib3.connection")
+    vendored("requests.packages.urllib3.connectionpool")
+    vendored("requests.packages.urllib3.contrib")
+    vendored("requests.packages.urllib3.contrib.ntlmpool")
+    vendored("requests.packages.urllib3.contrib.pyopenssl")
+    vendored("requests.packages.urllib3.exceptions")
+    vendored("requests.packages.urllib3.fields")
+    vendored("requests.packages.urllib3.filepost")
+    vendored("requests.packages.urllib3.packages")
+    vendored("requests.packages.urllib3.packages.ordered_dict")
+    vendored("requests.packages.urllib3.packages.six")
+    vendored("requests.packages.urllib3.packages.ssl_match_hostname")
+    vendored("requests.packages.urllib3.packages.ssl_match_hostname."
+             "_implementation")
+    vendored("requests.packages.urllib3.poolmanager")
+    vendored("requests.packages.urllib3.request")
+    vendored("requests.packages.urllib3.response")
+    vendored("requests.packages.urllib3.util")
+    vendored("requests.packages.urllib3.util.connection")
+    vendored("requests.packages.urllib3.util.request")
+    vendored("requests.packages.urllib3.util.response")
+    vendored("requests.packages.urllib3.util.retry")
+    vendored("requests.packages.urllib3.util.ssl_")
+    vendored("requests.packages.urllib3.util.timeout")
+    vendored("requests.packages.urllib3.util.url")
+    vendored("resolvelib")
+    vendored("rich")
+    vendored("rich.console")
+    vendored("rich.highlighter")
+    vendored("rich.logging")
+    vendored("rich.markup")
+    vendored("rich.progress")
+    vendored("rich.segment")
+    vendored("rich.style")
+    vendored("rich.text")
+    vendored("rich.traceback")
+    vendored("tenacity")
+    vendored("tomli")
+    vendored("urllib3")
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/__pycache__/__init__.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..17dcc03
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/__pycache__/__init__.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/__pycache__/six.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/__pycache__/six.cpython-39.pyc
new file mode 100644
index 0000000..7d8452f
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/__pycache__/six.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/__pycache__/typing_extensions.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/__pycache__/typing_extensions.cpython-39.pyc
new file mode 100644
index 0000000..deb048d
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/__pycache__/typing_extensions.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__init__.py b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__init__.py
new file mode 100644
index 0000000..f631ae6
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__init__.py
@@ -0,0 +1,18 @@
+# SPDX-FileCopyrightText: 2015 Eric Larson
+#
+# SPDX-License-Identifier: Apache-2.0
+
+"""CacheControl import Interface.
+
+Make it easy to import from cachecontrol without long namespaces.
+"""
+__author__ = "Eric Larson"
+__email__ = "eric@ionrock.org"
+__version__ = "0.12.11"
+
+from .wrapper import CacheControl
+from .adapter import CacheControlAdapter
+from .controller import CacheController
+
+import logging
+logging.getLogger(__name__).addHandler(logging.NullHandler())
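+
+# The canonical wrapper usage, sketched from the names exported above
+# (vendored requests path assumed):
+#
+#     from pip._vendor import requests
+#     from pip._vendor.cachecontrol import CacheControl
+#     sess = CacheControl(requests.Session())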
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..04bf2ff
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-39.pyc
new file mode 100644
index 0000000..62a5215
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-39.pyc
new file mode 100644
index 0000000..06e4943
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-39.pyc
new file mode 100644
index 0000000..1892487
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/compat.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/compat.cpython-39.pyc
new file mode 100644
index 0000000..0e37a4a
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/compat.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-39.pyc
new file mode 100644
index 0000000..fada7ee
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-39.pyc
new file mode 100644
index 0000000..f3183f7
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-39.pyc
new file mode 100644
index 0000000..4b6697f
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-39.pyc
new file mode 100644
index 0000000..dfa0e8c
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-39.pyc
new file mode 100644
index 0000000..b2fc851
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/_cmd.py b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/_cmd.py
new file mode 100644
index 0000000..4266b5e
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/_cmd.py
@@ -0,0 +1,61 @@
+# SPDX-FileCopyrightText: 2015 Eric Larson
+#
+# SPDX-License-Identifier: Apache-2.0
+
+import logging
+
+from pip._vendor import requests
+
+from pip._vendor.cachecontrol.adapter import CacheControlAdapter
+from pip._vendor.cachecontrol.cache import DictCache
+from pip._vendor.cachecontrol.controller import logger
+
+from argparse import ArgumentParser
+
+
+def setup_logging():
+    logger.setLevel(logging.DEBUG)
+    handler = logging.StreamHandler()
+    logger.addHandler(handler)
+
+
+def get_session():
+    adapter = CacheControlAdapter(
+        DictCache(), cache_etags=True, serializer=None, heuristic=None
+    )
+    sess = requests.Session()
+    sess.mount("http://", adapter)
+    sess.mount("https://", adapter)
+
+    sess.cache_controller = adapter.controller
+    return sess
+
+
+def get_args():
+    parser = ArgumentParser()
+    parser.add_argument("url", help="The URL to try and cache")
+    return parser.parse_args()
+
+
+def main(args=None):
+    args = get_args()
+    sess = get_session()
+
+    # Make a request to get a response
+    resp = sess.get(args.url)
+
+    # Turn on logging
+    setup_logging()
+
+    # try setting the cache
+    sess.cache_controller.cache_response(resp.request, resp.raw)
+
+    # Now try to get it
+    if sess.cache_controller.cached_request(resp.request):
+        print("Cached!")
+    else:
+        print("Not cached :(")
+
+
+if __name__ == "__main__":
+    main()
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/adapter.py b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/adapter.py
new file mode 100644
index 0000000..94c75e1
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/adapter.py
@@ -0,0 +1,137 @@
+# SPDX-FileCopyrightText: 2015 Eric Larson
+#
+# SPDX-License-Identifier: Apache-2.0
+
+import types
+import functools
+import zlib
+
+from pip._vendor.requests.adapters import HTTPAdapter
+
+from .controller import CacheController, PERMANENT_REDIRECT_STATUSES
+from .cache import DictCache
+from .filewrapper import CallbackFileWrapper
+
+
+class CacheControlAdapter(HTTPAdapter):
+    invalidating_methods = {"PUT", "PATCH", "DELETE"}
+
+    def __init__(
+        self,
+        cache=None,
+        cache_etags=True,
+        controller_class=None,
+        serializer=None,
+        heuristic=None,
+        cacheable_methods=None,
+        *args,
+        **kw
+    ):
+        super(CacheControlAdapter, self).__init__(*args, **kw)
+        self.cache = DictCache() if cache is None else cache
+        self.heuristic = heuristic
+        self.cacheable_methods = cacheable_methods or ("GET",)
+
+        controller_factory = controller_class or CacheController
+        self.controller = controller_factory(
+            self.cache, cache_etags=cache_etags, serializer=serializer
+        )
+
+    def send(self, request, cacheable_methods=None, **kw):
+        """
+        Send a request. Use the request information to see if a cached
+        response exists, and cache the response if we need to and can.
+        """
+        cacheable = cacheable_methods or self.cacheable_methods
+        if request.method in cacheable:
+            try:
+                cached_response = self.controller.cached_request(request)
+            except zlib.error:
+                cached_response = None
+            if cached_response:
+                return self.build_response(request, cached_response, from_cache=True)
+
+            # check for etags and add headers if appropriate
+            request.headers.update(self.controller.conditional_headers(request))
+
+        resp = super(CacheControlAdapter, self).send(request, **kw)
+
+        return resp
+
+    def build_response(
+        self, request, response, from_cache=False, cacheable_methods=None
+    ):
+        """
+        Build a response by making a request or using the cache.
+
+        This will end up calling send and returning a potentially
+        cached response.
+        """
+        cacheable = cacheable_methods or self.cacheable_methods
+        if not from_cache and request.method in cacheable:
+            # Check for any heuristics that might update headers
+            # before trying to cache.
+            if self.heuristic:
+                response = self.heuristic.apply(response)
+
+            # apply any expiration heuristics
+            if response.status == 304:
+                # We must have sent an ETag request. This could mean
+                # that we've been expired already or that we simply
+                # have an etag. In either case, we want to try to
+                # update the cache.
+                cached_response = self.controller.update_cached_response(
+                    request, response
+                )
+
+                if cached_response is not response:
+                    from_cache = True
+
+                # We are done with the server response, read a
+                # possible response body (compliant servers will
+                # not return one, but we cannot be 100% sure) and
+                # release the connection back to the pool.
+                response.read(decode_content=False)
+                response.release_conn()
+
+                response = cached_response
+
+            # We always cache the 301 responses
+            elif int(response.status) in PERMANENT_REDIRECT_STATUSES:
+                self.controller.cache_response(request, response)
+            else:
+                # Wrap the response file with a wrapper that will cache the
+                #   response when the stream has been consumed.
+                response._fp = CallbackFileWrapper(
+                    response._fp,
+                    functools.partial(
+                        self.controller.cache_response, request, response
+                    ),
+                )
+                if response.chunked:
+                    super_update_chunk_length = response._update_chunk_length
+
+                    def _update_chunk_length(self):
+                        super_update_chunk_length()
+                        if self.chunk_left == 0:
+                            self._fp._close()
+
+                    response._update_chunk_length = types.MethodType(
+                        _update_chunk_length, response
+                    )
+
+        resp = super(CacheControlAdapter, self).build_response(request, response)
+
+        # See if we should invalidate the cache.
+        if request.method in self.invalidating_methods and resp.ok:
+            cache_url = self.controller.cache_url(request.url)
+            self.cache.delete(cache_url)
+
+        # Give the request a from_cache attr to let people use it
+        resp.from_cache = from_cache
+
+        return resp
+
+    def close(self):
+        self.cache.close()
+        super(CacheControlAdapter, self).close()
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/cache.py b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/cache.py
new file mode 100644
index 0000000..2a965f5
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/cache.py
@@ -0,0 +1,65 @@
+# SPDX-FileCopyrightText: 2015 Eric Larson
+#
+# SPDX-License-Identifier: Apache-2.0
+
+"""
+The cache object API for implementing caches. The default is a thread-safe
+in-memory dictionary.
+"""
+from threading import Lock
+
+
+class BaseCache(object):
+
+    def get(self, key):
+        raise NotImplementedError()
+
+    def set(self, key, value, expires=None):
+        raise NotImplementedError()
+
+    def delete(self, key):
+        raise NotImplementedError()
+
+    def close(self):
+        pass
+
+
+class DictCache(BaseCache):
+
+    def __init__(self, init_dict=None):
+        self.lock = Lock()
+        self.data = init_dict or {}
+
+    def get(self, key):
+        return self.data.get(key, None)
+
+    def set(self, key, value, expires=None):
+        with self.lock:
+            self.data.update({key: value})
+
+    def delete(self, key):
+        with self.lock:
+            if key in self.data:
+                self.data.pop(key)
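+
+    # A minimal round trip with this in-memory cache:
+    #
+    #     cache = DictCache()
+    #     cache.set("key", b"value")
+    #     assert cache.get("key") == b"value"
+    #     cache.delete("key")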
+
+
+class SeparateBodyBaseCache(BaseCache):
+    """
+    In this variant, the body is not stored mixed in with the metadata, but is
+    passed in (as a bytes-like object) in a separate call to ``set_body()``.
+
+    That is, the expected interaction pattern is::
+
+        cache.set(key, serialized_metadata)
+        cache.set_body(key, body)
+
+    Similarly, the body should be loaded separately via ``get_body()``.
+    """
+    def set_body(self, key, body):
+        raise NotImplementedError()
+
+    def get_body(self, key):
+        """
+        Return the body as file-like object.
+        """
+        raise NotImplementedError()
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/__init__.py b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/__init__.py
new file mode 100644
index 0000000..3782729
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/__init__.py
@@ -0,0 +1,9 @@
+# SPDX-FileCopyrightText: 2015 Eric Larson
+#
+# SPDX-License-Identifier: Apache-2.0
+
+from .file_cache import FileCache, SeparateBodyFileCache
+from .redis_cache import RedisCache
+
+
+__all__ = ["FileCache", "SeparateBodyFileCache", "RedisCache"]
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..91f4fb6
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-39.pyc
new file mode 100644
index 0000000..b3d0db8
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-39.pyc
new file mode 100644
index 0000000..188ee14
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py
new file mode 100644
index 0000000..f1ddb2e
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py
@@ -0,0 +1,188 @@
+# SPDX-FileCopyrightText: 2015 Eric Larson
+#
+# SPDX-License-Identifier: Apache-2.0
+
+import hashlib
+import os
+from textwrap import dedent
+
+from ..cache import BaseCache, SeparateBodyBaseCache
+from ..controller import CacheController
+
+try:
+    FileNotFoundError
+except NameError:
+    # py2.X
+    FileNotFoundError = (IOError, OSError)
+
+
+def _secure_open_write(filename, fmode):
+    # We only want to write to this file, so open it in write only mode
+    flags = os.O_WRONLY
+
+    # os.O_CREAT | os.O_EXCL will fail if the file already exists, so we only
+    #  will open *new* files.
+    # We specify this because we want to ensure that the mode we pass is the
+    # mode of the file.
+    flags |= os.O_CREAT | os.O_EXCL
+
+    # Do not follow symlinks to prevent someone from making a symlink that
+    # we follow and insecurely open a cache file.
+    if hasattr(os, "O_NOFOLLOW"):
+        flags |= os.O_NOFOLLOW
+
+    # On Windows we'll mark this file as binary
+    if hasattr(os, "O_BINARY"):
+        flags |= os.O_BINARY
+
+    # Before we open our file, we want to delete any existing file that is
+    # there
+    try:
+        os.remove(filename)
+    except (IOError, OSError):
+        # The file didn't exist, so we can just skip ahead to opening it.
+        pass
+
+    # Open our file; the use of os.O_CREAT | os.O_EXCL ensures that if a
+    # race condition happens between the os.remove and this line, an
+    # error will be raised. Because we utilize a lockfile, this should only
+    # happen if someone is attempting to attack us.
+    fd = os.open(filename, flags, fmode)
+    try:
+        return os.fdopen(fd, "wb")
+
+    except BaseException:
+        # An error occurred wrapping our FD in a file object
+        os.close(fd)
+        raise
+
+
+class _FileCacheMixin:
+    """Shared implementation for both FileCache variants."""
+
+    def __init__(
+        self,
+        directory,
+        forever=False,
+        filemode=0o0600,
+        dirmode=0o0700,
+        use_dir_lock=None,
+        lock_class=None,
+    ):
+
+        if use_dir_lock is not None and lock_class is not None:
+            raise ValueError("Cannot use use_dir_lock and lock_class together")
+
+        try:
+            from lockfile import LockFile
+            from lockfile.mkdirlockfile import MkdirLockFile
+        except ImportError:
+            notice = dedent(
+                """
+            NOTE: In order to use the FileCache you must have
+            lockfile installed. You can install it via pip:
+              pip install lockfile
+            """
+            )
+            raise ImportError(notice)
+
+        else:
+            if use_dir_lock:
+                lock_class = MkdirLockFile
+
+            elif lock_class is None:
+                lock_class = LockFile
+
+        self.directory = directory
+        self.forever = forever
+        self.filemode = filemode
+        self.dirmode = dirmode
+        self.lock_class = lock_class
+
+    @staticmethod
+    def encode(x):
+        return hashlib.sha224(x.encode()).hexdigest()
+
+    def _fn(self, name):
+        # NOTE: This method should not change as some may depend on it.
+        #       See: https://github.com/ionrock/cachecontrol/issues/63
+        hashed = self.encode(name)
+        parts = list(hashed[:5]) + [hashed]
+        return os.path.join(self.directory, *parts)
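+
+        # With a hypothetical digest "abc12..." this yields
+        # <directory>/a/b/c/1/2/abc12..., sharding entries across small
+        # subdirectories instead of one flat folder.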
+
+    def get(self, key):
+        name = self._fn(key)
+        try:
+            with open(name, "rb") as fh:
+                return fh.read()
+
+        except FileNotFoundError:
+            return None
+
+    def set(self, key, value, expires=None):
+        name = self._fn(key)
+        self._write(name, value)
+
+    def _write(self, path, data: bytes):
+        """
+        Safely write the data to the given path.
+        """
+        # Make sure the directory exists
+        try:
+            os.makedirs(os.path.dirname(path), self.dirmode)
+        except (IOError, OSError):
+            pass
+
+        with self.lock_class(path) as lock:
+            # Write our actual file
+            with _secure_open_write(lock.path, self.filemode) as fh:
+                fh.write(data)
+
+    def _delete(self, key, suffix):
+        name = self._fn(key) + suffix
+        if not self.forever:
+            try:
+                os.remove(name)
+            except FileNotFoundError:
+                pass
+
+
+class FileCache(_FileCacheMixin, BaseCache):
+    """
+    Traditional FileCache: body is stored in memory, so not suitable for large
+    downloads.
+    """
+
+    def delete(self, key):
+        self._delete(key, "")
+
+
+class SeparateBodyFileCache(_FileCacheMixin, SeparateBodyBaseCache):
+    """
+    Memory-efficient FileCache: body is stored in a separate file, reducing
+    peak memory usage.
+    """
+
+    def get_body(self, key):
+        name = self._fn(key) + ".body"
+        try:
+            return open(name, "rb")
+        except FileNotFoundError:
+            return None
+
+    def set_body(self, key, body):
+        name = self._fn(key) + ".body"
+        self._write(name, body)
+
+    def delete(self, key):
+        self._delete(key, "")
+        self._delete(key, ".body")
+
+
+def url_to_file_path(url, filecache):
+    """Return the file cache path based on the URL.
+
+    This does not ensure the file exists!
+    """
+    key = CacheController.cache_url(url)
+    return filecache._fn(key)
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py
new file mode 100644
index 0000000..2cba4b0
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py
@@ -0,0 +1,39 @@
+# SPDX-FileCopyrightText: 2015 Eric Larson
+#
+# SPDX-License-Identifier: Apache-2.0
+
+from __future__ import division
+
+from datetime import datetime
+from pip._vendor.cachecontrol.cache import BaseCache
+
+
+class RedisCache(BaseCache):
+
+    def __init__(self, conn):
+        self.conn = conn
+
+    def get(self, key):
+        return self.conn.get(key)
+
+    def set(self, key, value, expires=None):
+        if not expires:
+            self.conn.set(key, value)
+        elif isinstance(expires, datetime):
+            expires = expires - datetime.utcnow()
+            self.conn.setex(key, int(expires.total_seconds()), value)
+        else:
+            self.conn.setex(key, expires, value)
+
+    def delete(self, key):
+        self.conn.delete(key)
+
+    def clear(self):
+        """Helper for clearing all the keys in a database. Use with
+        caution!"""
+        for key in self.conn.keys():
+            self.conn.delete(key)
+
+    def close(self):
+        """Redis uses connection pooling, no need to close the connection."""
+        pass
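+
+    # A hedged usage sketch; assumes the redis-py client:
+    #
+    #     import redis
+    #     cache = RedisCache(redis.Redis(host="localhost", port=6379))
+    #     cache.set("key", b"value", expires=300)  # stored via SETEX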
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/compat.py b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/compat.py
new file mode 100644
index 0000000..ccec937
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/compat.py
@@ -0,0 +1,32 @@
+# SPDX-FileCopyrightText: 2015 Eric Larson
+#
+# SPDX-License-Identifier: Apache-2.0
+
+try:
+    from urllib.parse import urljoin
+except ImportError:
+    from urlparse import urljoin
+
+
+try:
+    import cPickle as pickle
+except ImportError:
+    import pickle
+
+# Handle the case where the requests module has been patched to not have
+# urllib3 bundled as part of its source.
+try:
+    from pip._vendor.requests.packages.urllib3.response import HTTPResponse
+except ImportError:
+    from pip._vendor.urllib3.response import HTTPResponse
+
+try:
+    from pip._vendor.requests.packages.urllib3.util import is_fp_closed
+except ImportError:
+    from pip._vendor.urllib3.util import is_fp_closed
+
+# Replicate some six behaviour
+try:
+    text_type = unicode
+except NameError:
+    text_type = str
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/controller.py b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/controller.py
new file mode 100644
index 0000000..7f23529
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/controller.py
@@ -0,0 +1,439 @@
+# SPDX-FileCopyrightText: 2015 Eric Larson
+#
+# SPDX-License-Identifier: Apache-2.0
+
+"""
+The httplib2 algorithms ported for use with requests.
+"""
+import logging
+import re
+import calendar
+import time
+from email.utils import parsedate_tz
+
+from pip._vendor.requests.structures import CaseInsensitiveDict
+
+from .cache import DictCache, SeparateBodyBaseCache
+from .serialize import Serializer
+
+
+logger = logging.getLogger(__name__)
+
+URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
+
+PERMANENT_REDIRECT_STATUSES = (301, 308)
+
+
+def parse_uri(uri):
+    """Parses a URI using the regex given in Appendix B of RFC 3986.
+
+    (scheme, authority, path, query, fragment) = parse_uri(uri)
+    """
+    groups = URI.match(uri).groups()
+    return (groups[1], groups[3], groups[4], groups[6], groups[8])
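+    # A doctest-style example of the tuple above:
+    #
+    #     >>> parse_uri("http://example.com/path?q=1#frag")
+    #     ('http', 'example.com', '/path', 'q=1', 'frag')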
+
+
+class CacheController(object):
+    """An interface to see if request should cached or not."""
+
+    def __init__(
+        self, cache=None, cache_etags=True, serializer=None, status_codes=None
+    ):
+        self.cache = DictCache() if cache is None else cache
+        self.cache_etags = cache_etags
+        self.serializer = serializer or Serializer()
+        self.cacheable_status_codes = status_codes or (200, 203, 300, 301, 308)
+
+    @classmethod
+    def _urlnorm(cls, uri):
+        """Normalize the URL to create a safe key for the cache"""
+        (scheme, authority, path, query, fragment) = parse_uri(uri)
+        if not scheme or not authority:
+            raise Exception("Only absolute URIs are allowed. uri = %s" % uri)
+
+        scheme = scheme.lower()
+        authority = authority.lower()
+
+        if not path:
+            path = "/"
+
+        # Could do syntax based normalization of the URI before
+        # computing the digest. See Section 6.2.2 of Std 66.
+        request_uri = query and "?".join([path, query]) or path
+        defrag_uri = scheme + "://" + authority + request_uri
+
+        return defrag_uri
+
+    @classmethod
+    def cache_url(cls, uri):
+        return cls._urlnorm(uri)
+
+    def parse_cache_control(self, headers):
+        known_directives = {
+            # https://tools.ietf.org/html/rfc7234#section-5.2
+            "max-age": (int, True),
+            "max-stale": (int, False),
+            "min-fresh": (int, True),
+            "no-cache": (None, False),
+            "no-store": (None, False),
+            "no-transform": (None, False),
+            "only-if-cached": (None, False),
+            "must-revalidate": (None, False),
+            "public": (None, False),
+            "private": (None, False),
+            "proxy-revalidate": (None, False),
+            "s-maxage": (int, True),
+        }
+
+        cc_headers = headers.get("cache-control", headers.get("Cache-Control", ""))
+
+        retval = {}
+
+        for cc_directive in cc_headers.split(","):
+            if not cc_directive.strip():
+                continue
+
+            parts = cc_directive.split("=", 1)
+            directive = parts[0].strip()
+
+            try:
+                typ, required = known_directives[directive]
+            except KeyError:
+                logger.debug("Ignoring unknown cache-control directive: %s", directive)
+                continue
+
+            if not typ or not required:
+                retval[directive] = None
+            if typ:
+                try:
+                    retval[directive] = typ(parts[1].strip())
+                except IndexError:
+                    if required:
+                        logger.debug(
+                            "Missing value for cache-control " "directive: %s",
+                            directive,
+                        )
+                except ValueError:
+                    logger.debug(
+                        "Invalid value for cache-control directive " "%s, must be %s",
+                        directive,
+                        typ.__name__,
+                    )
+
+        return retval
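+
+        # A worked example of the parsing above:
+        #
+        #     >>> CacheController().parse_cache_control(
+        #     ...     {"cache-control": "max-age=3600, no-store"})
+        #     {'max-age': 3600, 'no-store': None}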
+
+    def cached_request(self, request):
+        """
+        Return a cached response if it exists in the cache, otherwise
+        return False.
+        """
+        cache_url = self.cache_url(request.url)
+        logger.debug('Looking up "%s" in the cache', cache_url)
+        cc = self.parse_cache_control(request.headers)
+
+        # Bail out if the request insists on fresh data
+        if "no-cache" in cc:
+            logger.debug('Request header has "no-cache", cache bypassed')
+            return False
+
+        if "max-age" in cc and cc["max-age"] == 0:
+            logger.debug('Request header has "max_age" as 0, cache bypassed')
+            return False
+
+        # Request allows serving from the cache, let's see if we find something
+        cache_data = self.cache.get(cache_url)
+        if cache_data is None:
+            logger.debug("No cache entry available")
+            return False
+
+        if isinstance(self.cache, SeparateBodyBaseCache):
+            body_file = self.cache.get_body(cache_url)
+        else:
+            body_file = None
+
+        # Check whether it can be deserialized
+        resp = self.serializer.loads(request, cache_data, body_file)
+        if not resp:
+            logger.warning("Cache entry deserialization failed, entry ignored")
+            return False
+
+        # If we have a cached permanent redirect, return it immediately. We
+        # don't need to test our response for other headers b/c it is
+        # intrinsically "cacheable" as it is Permanent.
+        #
+        # See:
+        #   https://tools.ietf.org/html/rfc7231#section-6.4.2
+        #
+        # Client can try to refresh the value by repeating the request
+        # with cache busting headers as usual (ie no-cache).
+        if int(resp.status) in PERMANENT_REDIRECT_STATUSES:
+            msg = (
+                "Returning cached permanent redirect response "
+                "(ignoring date and etag information)"
+            )
+            logger.debug(msg)
+            return resp
+
+        headers = CaseInsensitiveDict(resp.headers)
+        if not headers or "date" not in headers:
+            if "etag" not in headers:
+                # Without date or etag, the cached response can never be used
+                # and should be deleted.
+                logger.debug("Purging cached response: no date or etag")
+                self.cache.delete(cache_url)
+            logger.debug("Ignoring cached response: no date")
+            return False
+
+        now = time.time()
+        date = calendar.timegm(parsedate_tz(headers["date"]))
+        current_age = max(0, now - date)
+        logger.debug("Current age based on date: %i", current_age)
+
+        # TODO: There is an assumption that the result will be a
+        #       urllib3 response object. This may not be best since we
+        #       could probably avoid instantiating or constructing the
+        #       response until we know we need it.
+        resp_cc = self.parse_cache_control(headers)
+
+        # determine freshness
+        freshness_lifetime = 0
+
+        # Check the max-age pragma in the cache control header
+        if "max-age" in resp_cc:
+            freshness_lifetime = resp_cc["max-age"]
+            logger.debug("Freshness lifetime from max-age: %i", freshness_lifetime)
+
+        # If there isn't a max-age, check for an expires header
+        elif "expires" in headers:
+            expires = parsedate_tz(headers["expires"])
+            if expires is not None:
+                expire_time = calendar.timegm(expires) - date
+                freshness_lifetime = max(0, expire_time)
+                logger.debug("Freshness lifetime from expires: %i", freshness_lifetime)
+
+        # Determine if we are setting freshness limit in the
+        # request. Note, this overrides what was in the response.
+        if "max-age" in cc:
+            freshness_lifetime = cc["max-age"]
+            logger.debug(
+                "Freshness lifetime from request max-age: %i", freshness_lifetime
+            )
+
+        if "min-fresh" in cc:
+            min_fresh = cc["min-fresh"]
+            # adjust our current age by our min fresh
+            current_age += min_fresh
+            logger.debug("Adjusted current age from min-fresh: %i", current_age)
+
+        # Return entry if it is fresh enough
+        if freshness_lifetime > current_age:
+            logger.debug('The response is "fresh", returning cached response')
+            logger.debug("%i > %i", freshness_lifetime, current_age)
+            return resp
+
+        # we're not fresh. If we don't have an Etag, clear it out
+        if "etag" not in headers:
+            logger.debug('The cached response is "stale" with no etag, purging')
+            self.cache.delete(cache_url)
+
+        # return the original handler
+        return False
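+
+        # A worked freshness example for the logic above: a response whose
+        # Date header is 100s old with "max-age=3600" gives
+        # freshness_lifetime 3600 > current_age 100, so the cached response
+        # is returned; at 4000s of age it would be stale and, lacking an
+        # ETag, purged.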
+
+    def conditional_headers(self, request):
+        cache_url = self.cache_url(request.url)
+        resp = self.serializer.loads(request, self.cache.get(cache_url))
+        new_headers = {}
+
+        if resp:
+            headers = CaseInsensitiveDict(resp.headers)
+
+            if "etag" in headers:
+                new_headers["If-None-Match"] = headers["ETag"]
+
+            if "last-modified" in headers:
+                new_headers["If-Modified-Since"] = headers["Last-Modified"]
+
+        return new_headers
+
+    def _cache_set(self, cache_url, request, response, body=None, expires_time=None):
+        """
+        Store the data in the cache.
+        """
+        if isinstance(self.cache, SeparateBodyBaseCache):
+            # We pass in the body separately; just put a placeholder empty
+            # string in the metadata.
+            self.cache.set(
+                cache_url,
+                self.serializer.dumps(request, response, b""),
+                expires=expires_time,
+            )
+            self.cache.set_body(cache_url, body)
+        else:
+            self.cache.set(
+                cache_url,
+                self.serializer.dumps(request, response, body),
+                expires=expires_time,
+            )
+
+    def cache_response(self, request, response, body=None, status_codes=None):
+        """
+        Algorithm for caching requests.
+
+        This assumes a requests Response object.
+        """
+        # From httplib2: Don't cache 206's since we aren't going to
+        #                handle byte range requests
+        cacheable_status_codes = status_codes or self.cacheable_status_codes
+        if response.status not in cacheable_status_codes:
+            logger.debug(
+                "Status code %s not in %s", response.status, cacheable_status_codes
+            )
+            return
+
+        response_headers = CaseInsensitiveDict(response.headers)
+
+        if "date" in response_headers:
+            date = calendar.timegm(parsedate_tz(response_headers["date"]))
+        else:
+            date = 0
+
+        # If we've been given a body, our response has a Content-Length, and
+        # that Content-Length is valid, then we can check whether the body
+        # we've been given matches the expected size; if it doesn't, we'll
+        # just skip trying to cache it.
+        if (
+            body is not None
+            and "content-length" in response_headers
+            and response_headers["content-length"].isdigit()
+            and int(response_headers["content-length"]) != len(body)
+        ):
+            return
+
+        cc_req = self.parse_cache_control(request.headers)
+        cc = self.parse_cache_control(response_headers)
+
+        cache_url = self.cache_url(request.url)
+        logger.debug('Updating cache with response from "%s"', cache_url)
+
+        # Delete it from the cache if we happen to have it stored there
+        no_store = False
+        if "no-store" in cc:
+            no_store = True
+            logger.debug('Response header has "no-store"')
+        if "no-store" in cc_req:
+            no_store = True
+            logger.debug('Request header has "no-store"')
+        if no_store and self.cache.get(cache_url):
+            logger.debug('Purging existing cache entry to honor "no-store"')
+            self.cache.delete(cache_url)
+        if no_store:
+            return
+
+        # https://tools.ietf.org/html/rfc7234#section-4.1:
+        # A Vary header field-value of "*" always fails to match.
+        # Storing such a response leads to a deserialization warning
+        # during cache lookup and is not allowed to ever be served,
+        # so storing it can be avoided.
+        if "*" in response_headers.get("vary", ""):
+            logger.debug('Response header has "Vary: *"')
+            return
+
+        # If we've been given an etag, then keep the response
+        if self.cache_etags and "etag" in response_headers:
+            expires_time = 0
+            if response_headers.get("expires"):
+                expires = parsedate_tz(response_headers["expires"])
+                if expires is not None:
+                    expires_time = calendar.timegm(expires) - date
+
+            expires_time = max(expires_time, 14 * 86400)
+
+            logger.debug("etag object cached for {0} seconds".format(expires_time))
+            logger.debug("Caching due to etag")
+            self._cache_set(cache_url, request, response, body, expires_time)
+
+        # Add any permanent redirects to the cache. We do this before looking
+        # at the Date headers.
+        elif int(response.status) in PERMANENT_REDIRECT_STATUSES:
+            logger.debug("Caching permanent redirect")
+            self._cache_set(cache_url, request, response, b"")
+
+        # Add to the cache if the response headers demand it. If there
+        # is no date header then we can't do anything about expiring
+        # the cache.
+        elif "date" in response_headers:
+            date = calendar.timegm(parsedate_tz(response_headers["date"]))
+            # cache when there is a max-age > 0
+            if "max-age" in cc and cc["max-age"] > 0:
+                logger.debug("Caching b/c date exists and max-age > 0")
+                expires_time = cc["max-age"]
+                self._cache_set(
+                    cache_url,
+                    request,
+                    response,
+                    body,
+                    expires_time,
+                )
+
+            # If the response can expire, it means we should cache it
+            # in the meantime.
+            elif "expires" in response_headers:
+                if response_headers["expires"]:
+                    expires = parsedate_tz(response_headers["expires"])
+                    if expires is not None:
+                        expires_time = calendar.timegm(expires) - date
+                    else:
+                        expires_time = None
+
+                    logger.debug(
+                        "Caching b/c of expires header. expires in {0} seconds".format(
+                            expires_time
+                        )
+                    )
+                    self._cache_set(
+                        cache_url,
+                        request,
+                        response,
+                        body,
+                        expires_time,
+                    )
+
+    def update_cached_response(self, request, response):
+        """On a 304 we will get a new set of headers that we want to
+        update our cached value with, assuming we have one.
+
+        This should only ever be called when we've sent an ETag and
+        gotten a 304 as the response.
+        """
+        cache_url = self.cache_url(request.url)
+
+        cached_response = self.serializer.loads(request, self.cache.get(cache_url))
+
+        if not cached_response:
+            # we didn't have a cached response
+            return response
+
+        # Let's update our headers with the headers from the new response:
+        # http://tools.ietf.org/html/draft-ietf-httpbis-p4-conditional-26#section-4.1
+        #
+        # The server isn't supposed to send headers that would make
+        # the cached body invalid. But... just in case, we'll be sure
+        # to strip out ones we know might be problematic given
+        # typical assumptions.
+        excluded_headers = ["content-length"]
+
+        cached_response.headers.update(
+            dict(
+                (k, v)
+                for k, v in response.headers.items()
+                if k.lower() not in excluded_headers
+            )
+        )
+
+        # we want a 200 b/c we have content via the cache
+        cached_response.status = 200
+
+        # update our cache
+        self._cache_set(cache_url, request, cached_response)
+
+        return cached_response
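
Taken together, conditional_headers() and update_cached_response() implement HTTP revalidation. A minimal sketch of that round trip, with invented header values for illustration:

    # Hypothetical validators held for a stale cache entry; conditional_headers()
    # copies them onto the outgoing request so the server can reply 304.
    cached = {"ETag": 'W/"abc123"', "Last-Modified": "Sat, 01 Jan 2022 00:00:00 GMT"}
    conditional = {
        "If-None-Match": cached["ETag"],
        "If-Modified-Since": cached["Last-Modified"],
    }
    # On a 304, update_cached_response() merges the fresh headers (minus
    # Content-Length) into the stored entry and rewrites its status to 200.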
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/filewrapper.py b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/filewrapper.py
new file mode 100644
index 0000000..f5ed5f6
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/filewrapper.py
@@ -0,0 +1,111 @@
+# SPDX-FileCopyrightText: 2015 Eric Larson
+#
+# SPDX-License-Identifier: Apache-2.0
+
+from tempfile import NamedTemporaryFile
+import mmap
+
+
+class CallbackFileWrapper(object):
+    """
+    Small wrapper around a fp object which will tee everything read into a
+    buffer, and when that file is closed it will execute a callback with the
+    contents of that buffer.
+
+    All attributes are proxied to the underlying file object.
+
+    This class uses members with a double underscore (__) leading prefix so as
+    not to accidentally shadow an attribute.
+
+    The data is stored in a temporary file until it is all available.  As long
+    as the temporary files directory is disk-based (sometimes it's a
+    memory-backed-``tmpfs`` on Linux), data will be unloaded to disk if memory
+    pressure is high.  For small files the disk usually won't be used at all,
+    it'll all be in the filesystem memory cache, so there should be no
+    performance impact.
+    """
+
+    def __init__(self, fp, callback):
+        self.__buf = NamedTemporaryFile("rb+", delete=True)
+        self.__fp = fp
+        self.__callback = callback
+
+    def __getattr__(self, name):
+        # The vagaries of garbage collection mean that self.__fp is
+        # not always set.  Using __getattribute__ with the mangled
+        # private name[0] looks up the attribute value and raises an
+        # AttributeError when it doesn't exist. This stops getattr from
+        # recursing infinitely in the case where self.__fp hasn't been
+        # set.
+        #
+        # [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers
+        fp = self.__getattribute__("_CallbackFileWrapper__fp")
+        return getattr(fp, name)
+
+    def __is_fp_closed(self):
+        try:
+            return self.__fp.fp is None
+
+        except AttributeError:
+            pass
+
+        try:
+            return self.__fp.closed
+
+        except AttributeError:
+            pass
+
+        # We just don't cache it then.
+        # TODO: Add some logging here...
+        return False
+
+    def _close(self):
+        if self.__callback:
+            if self.__buf.tell() == 0:
+                # Empty file:
+                result = b""
+            else:
+                # Return the data without actually loading it into memory,
+                # relying on Python's buffer API and mmap(). mmap() just gives
+                # a view directly into the filesystem's memory cache, so it
+                # doesn't result in duplicate memory use.
+                self.__buf.seek(0, 0)
+                result = memoryview(
+                    mmap.mmap(self.__buf.fileno(), 0, access=mmap.ACCESS_READ)
+                )
+            self.__callback(result)
+
+        # We assign this to None here, because otherwise we can get into
+        # really tricky problems where the CPython interpreter deadlocks
+        # because the callback is holding a reference to something which
+        # has a __del__ method. Setting this to None breaks the cycle
+        # and allows the garbage collector to do its thing normally.
+        self.__callback = None
+
+        # Closing the temporary file releases memory and frees disk space.
+        # Important when caching big files.
+        self.__buf.close()
+
+    def read(self, amt=None):
+        data = self.__fp.read(amt)
+        if data:
+            # We may be dealing with b'', a sign that things are over:
+            # it's passed e.g. after we've already closed self.__buf.
+            self.__buf.write(data)
+        if self.__is_fp_closed():
+            self._close()
+
+        return data
+
+    def _safe_read(self, amt):
+        data = self.__fp._safe_read(amt)
+        if amt == 2 and data == b"\r\n":
+            # urllib executes this read to toss the CRLF at the end
+            # of the chunk.
+            return data
+
+        self.__buf.write(data)
+        if self.__is_fp_closed():
+            self._close()
+
+        return data
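
The tee behaviour described in the class docstring is easiest to verify with a toy fp. A self-contained sketch, where FakeSock is a hypothetical stand-in that mimics the httplib convention of setting .fp to None at EOF:

    import io

    from pip._vendor.cachecontrol.filewrapper import CallbackFileWrapper

    class FakeSock:
        # Hypothetical stand-in for an HTTP response fp; .fp becoming None
        # is one of the EOF signals __is_fp_closed() checks for.
        def __init__(self, data):
            self.fp = io.BytesIO(data)

        def read(self, amt=None):
            data = self.fp.read(amt)
            if not data:
                self.fp = None
            return data

    captured = []
    wrapped = CallbackFileWrapper(FakeSock(b"hello world"), captured.append)
    while wrapped.read(4):
        pass
    assert bytes(captured[0]) == b"hello world"  # callback got the tee'd body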
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/heuristics.py b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/heuristics.py
new file mode 100644
index 0000000..ebe4a96
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/heuristics.py
@@ -0,0 +1,139 @@
+# SPDX-FileCopyrightText: 2015 Eric Larson
+#
+# SPDX-License-Identifier: Apache-2.0
+
+import calendar
+import time
+
+from email.utils import formatdate, parsedate, parsedate_tz
+
+from datetime import datetime, timedelta
+
+TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT"
+
+
+def expire_after(delta, date=None):
+    date = date or datetime.utcnow()
+    return date + delta
+
+
+def datetime_to_header(dt):
+    return formatdate(calendar.timegm(dt.timetuple()))
+
+
+class BaseHeuristic(object):
+
+    def warning(self, response):
+        """
+        Return a valid 1xx warning header value describing the cache
+        adjustments.
+
+        The response is provided to allow warnings like 113
+        http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need
+        to explicitly say the response is over 24 hours old.
+        """
+        return '110 - "Response is Stale"'
+
+    def update_headers(self, response):
+        """Update the response headers with any new headers.
+
+        NOTE: This SHOULD always include some Warning header to
+              signify that the response was cached by the client, not
+              by way of the provided headers.
+        """
+        return {}
+
+    def apply(self, response):
+        updated_headers = self.update_headers(response)
+
+        if updated_headers:
+            response.headers.update(updated_headers)
+            warning_header_value = self.warning(response)
+            if warning_header_value is not None:
+                response.headers.update({"Warning": warning_header_value})
+
+        return response
+
+
+class OneDayCache(BaseHeuristic):
+    """
+    Cache the response by providing an Expires header 1 day in the
+    future.
+    """
+
+    def update_headers(self, response):
+        headers = {}
+
+        if "expires" not in response.headers:
+            date = parsedate(response.headers["date"])
+            expires = expire_after(timedelta(days=1), date=datetime(*date[:6]))
+            headers["expires"] = datetime_to_header(expires)
+            headers["cache-control"] = "public"
+        return headers
+
+
+class ExpiresAfter(BaseHeuristic):
+    """
+    Cache **all** requests for a defined time period.
+    """
+
+    def __init__(self, **kw):
+        self.delta = timedelta(**kw)
+
+    def update_headers(self, response):
+        expires = expire_after(self.delta)
+        return {"expires": datetime_to_header(expires), "cache-control": "public"}
+
+    def warning(self, response):
+        tmpl = "110 - Automatically cached for %s. Response might be stale"
+        return tmpl % self.delta
+
+
+class LastModified(BaseHeuristic):
+    """
+    If there is no Expires header already, fall back on Last-Modified
+    using the heuristic from
+    http://tools.ietf.org/html/rfc7234#section-4.2.2
+    to calculate a reasonable value.
+
+    Firefox also does something like this per
+    https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ
+    http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397
+    Unlike Mozilla, we limit this to 24 hours.
+    """
+    cacheable_by_default_statuses = {
+        200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501
+    }
+
+    def update_headers(self, resp):
+        headers = resp.headers
+
+        if "expires" in headers:
+            return {}
+
+        if "cache-control" in headers and headers["cache-control"] != "public":
+            return {}
+
+        if resp.status not in self.cacheable_by_default_statuses:
+            return {}
+
+        if "date" not in headers or "last-modified" not in headers:
+            return {}
+
+        date = calendar.timegm(parsedate_tz(headers["date"]))
+        last_modified = parsedate(headers["last-modified"])
+        if date is None or last_modified is None:
+            return {}
+
+        now = time.time()
+        current_age = max(0, now - date)
+        delta = date - calendar.timegm(last_modified)
+        freshness_lifetime = max(0, min(delta / 10, 24 * 3600))
+        if freshness_lifetime <= current_age:
+            return {}
+
+        expires = date + freshness_lifetime
+        return {"expires": time.strftime(TIME_FMT, time.gmtime(expires))}
+
+    def warning(self, resp):
+        return None
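
As a usage sketch, a heuristic is handed to the adapter via the CacheControl wrapper added later in this diff (this assumes the requests package is available and that the vendored package re-exports CacheControl from its __init__):

    import requests

    from pip._vendor.cachecontrol import CacheControl
    from pip._vendor.cachecontrol.heuristics import ExpiresAfter

    # Every cacheable response is given an Expires header one day out,
    # plus the "110 - Automatically cached..." Warning from warning().
    sess = CacheControl(requests.Session(), heuristic=ExpiresAfter(days=1))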
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/serialize.py b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/serialize.py
new file mode 100644
index 0000000..7fe1a3e
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/serialize.py
@@ -0,0 +1,190 @@
+# SPDX-FileCopyrightText: 2015 Eric Larson
+#
+# SPDX-License-Identifier: Apache-2.0
+
+import base64
+import io
+import json
+import zlib
+
+from pip._vendor import msgpack
+from pip._vendor.requests.structures import CaseInsensitiveDict
+
+from .compat import HTTPResponse, pickle, text_type
+
+
+def _b64_decode_bytes(b):
+    return base64.b64decode(b.encode("ascii"))
+
+
+def _b64_decode_str(s):
+    return _b64_decode_bytes(s).decode("utf8")
+
+
+_default_body_read = object()
+
+
+class Serializer(object):
+    def dumps(self, request, response, body=None):
+        response_headers = CaseInsensitiveDict(response.headers)
+
+        if body is None:
+            # When a body isn't passed in, we'll read the response. We
+            # also update the response with a new file handle to be
+            # sure it acts as though it was never read.
+            body = response.read(decode_content=False)
+            response._fp = io.BytesIO(body)
+
+        # NOTE: This is all a bit weird, but it's really important that on
+        #       Python 2.x these objects are unicode and not str, even when
+        #       they contain only ascii. The problem here is that msgpack
+        #       understands the difference between unicode and bytes and we
+        #       have it set to differentiate between them, however Python 2
+        #       doesn't know the difference. Forcing these to unicode will be
+        #       enough to have msgpack know the difference.
+        data = {
+            u"response": {
+                u"body": body,  # Empty bytestring if body is stored separately
+                u"headers": dict(
+                    (text_type(k), text_type(v)) for k, v in response.headers.items()
+                ),
+                u"status": response.status,
+                u"version": response.version,
+                u"reason": text_type(response.reason),
+                u"strict": response.strict,
+                u"decode_content": response.decode_content,
+            }
+        }
+
+        # Construct our vary headers
+        data[u"vary"] = {}
+        if u"vary" in response_headers:
+            varied_headers = response_headers[u"vary"].split(",")
+            for header in varied_headers:
+                header = text_type(header).strip()
+                header_value = request.headers.get(header, None)
+                if header_value is not None:
+                    header_value = text_type(header_value)
+                data[u"vary"][header] = header_value
+
+        return b",".join([b"cc=4", msgpack.dumps(data, use_bin_type=True)])
+
+    def loads(self, request, data, body_file=None):
+        # Short circuit if we've been given an empty set of data
+        if not data:
+            return
+
+        # Determine what version of the serializer the data was serialized
+        # with
+        try:
+            ver, data = data.split(b",", 1)
+        except ValueError:
+            ver = b"cc=0"
+
+        # Make sure that our "ver" is actually a version and isn't a false
+        # positive from a comma being in the data stream.
+        if ver[:3] != b"cc=":
+            data = ver + data
+            ver = b"cc=0"
+
+        # Get the version number out of the cc=N
+        ver = ver.split(b"=", 1)[-1].decode("ascii")
+
+        # Dispatch to the actual load method for the given version
+        try:
+            return getattr(self, "_loads_v{}".format(ver))(request, data, body_file)
+
+        except AttributeError:
+            # This is a version we don't have a loads function for, so we'll
+            # just treat it as a miss and return None
+            return
+
+    def prepare_response(self, request, cached, body_file=None):
+        """Verify our vary headers match and construct a real urllib3
+        HTTPResponse object.
+        """
+        # Special case the '*' Vary value as it means we cannot actually
+        # determine if the cached response is suitable for this request.
+        # This case is also handled in the controller code when creating
+        # a cache entry, but is left here for backwards compatibility.
+        if "*" in cached.get("vary", {}):
+            return
+
+        # Ensure that the Vary headers for the cached response match our
+        # request
+        for header, value in cached.get("vary", {}).items():
+            if request.headers.get(header, None) != value:
+                return
+
+        body_raw = cached["response"].pop("body")
+
+        headers = CaseInsensitiveDict(data=cached["response"]["headers"])
+        if headers.get("transfer-encoding", "") == "chunked":
+            headers.pop("transfer-encoding")
+
+        cached["response"]["headers"] = headers
+
+        try:
+            if body_file is None:
+                body = io.BytesIO(body_raw)
+            else:
+                body = body_file
+        except TypeError:
+            # This can happen if cachecontrol serialized to v1 format (pickle)
+            # using Python 2. A Python 2 str(byte string) will be unpickled as
+            # a Python 3 str (unicode string), which will cause the above to
+            # fail with:
+            #
+            #     TypeError: 'str' does not support the buffer interface
+            body = io.BytesIO(body_raw.encode("utf8"))
+
+        return HTTPResponse(body=body, preload_content=False, **cached["response"])
+
+    def _loads_v0(self, request, data, body_file=None):
+        # The original legacy cache data. This doesn't contain enough
+        # information to construct everything we need, so we'll treat this as
+        # a miss.
+        return
+
+    def _loads_v1(self, request, data, body_file=None):
+        try:
+            cached = pickle.loads(data)
+        except ValueError:
+            return
+
+        return self.prepare_response(request, cached, body_file)
+
+    def _loads_v2(self, request, data, body_file=None):
+        assert body_file is None
+        try:
+            cached = json.loads(zlib.decompress(data).decode("utf8"))
+        except (ValueError, zlib.error):
+            return
+
+        # We need to decode the items that we've base64 encoded
+        cached["response"]["body"] = _b64_decode_bytes(cached["response"]["body"])
+        cached["response"]["headers"] = dict(
+            (_b64_decode_str(k), _b64_decode_str(v))
+            for k, v in cached["response"]["headers"].items()
+        )
+        cached["response"]["reason"] = _b64_decode_str(cached["response"]["reason"])
+        cached["vary"] = dict(
+            (_b64_decode_str(k), _b64_decode_str(v) if v is not None else v)
+            for k, v in cached["vary"].items()
+        )
+
+        return self.prepare_response(request, cached, body_file)
+
+    def _loads_v3(self, request, data, body_file):
+        # Due to Python 2 encoding issues, it's impossible to know for sure
+        # exactly how to load v3 entries, thus we'll treat these as a miss so
+        # that they get rewritten out as v4 entries.
+        return
+
+    def _loads_v4(self, request, data, body_file=None):
+        try:
+            cached = msgpack.loads(data, raw=False)
+        except ValueError:
+            return
+
+        return self.prepare_response(request, cached, body_file)
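
The b"cc=N," version prefix is the load-bearing part of loads(). A standalone sketch of the same parsing logic (a re-implementation for illustration, not an import from the module):

    def split_version(data):
        # Peel off a leading b"cc=N," marker; anything without one is v0.
        try:
            ver, rest = data.split(b",", 1)
        except ValueError:
            return "0", data
        if ver[:3] != b"cc=":
            return "0", data  # the comma belonged to the payload, not a prefix
        return ver.split(b"=", 1)[-1].decode("ascii"), rest

    assert split_version(b"cc=4,\x81payload") == ("4", b"\x81payload")
    assert split_version(b"legacy pickle bytes") == ("0", b"legacy pickle bytes")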
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/wrapper.py b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/wrapper.py
new file mode 100644
index 0000000..b6ee7f2
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/cachecontrol/wrapper.py
@@ -0,0 +1,33 @@
+# SPDX-FileCopyrightText: 2015 Eric Larson
+#
+# SPDX-License-Identifier: Apache-2.0
+
+from .adapter import CacheControlAdapter
+from .cache import DictCache
+
+
+def CacheControl(
+    sess,
+    cache=None,
+    cache_etags=True,
+    serializer=None,
+    heuristic=None,
+    controller_class=None,
+    adapter_class=None,
+    cacheable_methods=None,
+):
+
+    cache = DictCache() if cache is None else cache
+    adapter_class = adapter_class or CacheControlAdapter
+    adapter = adapter_class(
+        cache,
+        cache_etags=cache_etags,
+        serializer=serializer,
+        heuristic=heuristic,
+        controller_class=controller_class,
+        cacheable_methods=cacheable_methods,
+    )
+    sess.mount("http://", adapter)
+    sess.mount("https://", adapter)
+
+    return sess
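
A minimal usage sketch for the wrapper (again assuming the requests package and the top-level CacheControl re-export):

    import requests

    from pip._vendor.cachecontrol import CacheControl

    # http:// and https:// traffic now flows through CacheControlAdapter,
    # backed by the default in-memory DictCache.
    sess = CacheControl(requests.Session())
    sess.get("https://example.com/")
    sess.get("https://example.com/")  # may be answered from the cache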
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/certifi/__init__.py b/venv/lib/python3.9/site-packages/pip/_vendor/certifi/__init__.py
new file mode 100644
index 0000000..af4bcc1
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/certifi/__init__.py
@@ -0,0 +1,4 @@
+from .core import contents, where
+
+__all__ = ["contents", "where"]
+__version__ = "2022.09.24"
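
A short sketch of the two names exported here:

    from pip._vendor.certifi import contents, where

    print(where())    # filesystem path to the bundled cacert.pem
    pem = contents()  # the same PEM data as a str
    assert "BEGIN CERTIFICATE" in pem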
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/certifi/__main__.py b/venv/lib/python3.9/site-packages/pip/_vendor/certifi/__main__.py
new file mode 100644
index 0000000..0037634
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/certifi/__main__.py
@@ -0,0 +1,12 @@
+import argparse
+
+from pip._vendor.certifi import contents, where
+
+parser = argparse.ArgumentParser()
+parser.add_argument("-c", "--contents", action="store_true")
+args = parser.parse_args()
+
+if args.contents:
+    print(contents())
+else:
+    print(where())
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..abcf80f
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/certifi/__pycache__/__main__.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/certifi/__pycache__/__main__.cpython-39.pyc
new file mode 100644
index 0000000..9b08a16
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/certifi/__pycache__/__main__.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-39.pyc
new file mode 100644
index 0000000..4f5198b
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/certifi/cacert.pem b/venv/lib/python3.9/site-packages/pip/_vendor/certifi/cacert.pem
new file mode 100644
index 0000000..4005155
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/certifi/cacert.pem
@@ -0,0 +1,4708 @@
+
+# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
+# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
+# Label: "GlobalSign Root CA"
+# Serial: 4835703278459707669005204
+# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a
+# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c
+# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99
+-----BEGIN CERTIFICATE-----
+MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG
+A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
+b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw
+MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i
+YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT
+aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ
+jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp
+xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp
+1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG
+snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ
+U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8
+9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E
+BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B
+AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz
+yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE
+38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP
+AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad
+DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME
+HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Label: "Entrust.net Premium 2048 Secure Server CA"
+# Serial: 946069240
+# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90
+# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31
+# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77
+-----BEGIN CERTIFICATE-----
+MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML
+RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp
+bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5
+IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3
+MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3
+LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp
+YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG
+A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq
+K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe
+sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX
+MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT
+XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/
+HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH
+4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
+HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub
+j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo
+U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf
+zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b
+u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+
+bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er
+fF6adulZkMV8gzURZVE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
+# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
+# Label: "Baltimore CyberTrust Root"
+# Serial: 33554617
+# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4
+# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74
+# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ
+RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD
+VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX
+DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y
+ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy
+VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr
+mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr
+IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK
+mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu
+XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy
+dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye
+jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1
+BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3
+DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92
+9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx
+jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0
+Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz
+ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS
+R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
+# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
+# Label: "Entrust Root Certification Authority"
+# Serial: 1164660820
+# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4
+# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9
+# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c
+-----BEGIN CERTIFICATE-----
+MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0
+Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW
+KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl
+cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw
+NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw
+NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy
+ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV
+BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ
+KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo
+Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4
+4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9
+KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI
+rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi
+94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB
+sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi
+gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo
+kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE
+vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA
+A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t
+O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua
+AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP
+9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/
+eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m
+0vdXcDazv/wor3ElhVsT/h5/WrQ8
+-----END CERTIFICATE-----
+
+# Issuer: CN=AAA Certificate Services O=Comodo CA Limited
+# Subject: CN=AAA Certificate Services O=Comodo CA Limited
+# Label: "Comodo AAA Services root"
+# Serial: 1
+# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0
+# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49
+# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4
+-----BEGIN CERTIFICATE-----
+MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb
+MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
+GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj
+YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM
+GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP
+ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua
+BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe
+3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4
+YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR
+rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm
+ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU
+oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF
+MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v
+QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t
+b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF
+AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q
+GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz
+Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2
+G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi
+l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3
+smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 2"
+# Serial: 1289
+# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b
+# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7
+# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86
+-----BEGIN CERTIFICATE-----
+MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
+GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
+b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV
+BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
+YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa
+GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg
+Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J
+WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB
+rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp
++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1
+ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i
+Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz
+PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og
+/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH
+oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI
+yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud
+EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2
+A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL
+MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT
+ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f
+BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn
+g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl
+fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K
+WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha
+B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc
+hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR
+TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD
+mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z
+ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y
+4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza
+8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 3"
+# Serial: 1478
+# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf
+# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85
+# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35
+-----BEGIN CERTIFICATE-----
+MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
+GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
+b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV
+BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
+YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM
+V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB
+4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr
+H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd
+8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv
+vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT
+mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe
+btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc
+T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt
+WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ
+c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A
+4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD
+VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG
+CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0
+aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0
+aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu
+dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw
+czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G
+A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC
+TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg
+Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0
+7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem
+d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd
++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B
+4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN
+t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x
+DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57
+k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s
+zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j
+Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT
+mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK
+4SVhM7JZG+Ju1zdXtg2pEto=
+-----END CERTIFICATE-----
+
+# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1
+# Subject: O=SECOM Trust.net OU=Security Communication RootCA1
+# Label: "Security Communication Root CA"
+# Serial: 0
+# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a
+# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7
+# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c
+-----BEGIN CERTIFICATE-----
+MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY
+MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t
+dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5
+WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD
+VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3
+DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8
+9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ
+DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9
+Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N
+QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ
+xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G
+A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T
+AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG
+kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr
+Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5
+Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU
+JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot
+RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Label: "XRamp Global CA Root"
+# Serial: 107108908803651509692980124233745014957
+# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1
+# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6
+# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2
+-----BEGIN CERTIFICATE-----
+MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB
+gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk
+MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY
+UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx
+NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3
+dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy
+dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB
+dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6
+38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP
+KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q
+DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4
+qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa
+JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi
+PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P
+BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs
+jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0
+eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD
+ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR
+vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt
+qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa
+IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy
+i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ
+O+7ETPTsJ3xCwnR8gooJybQDJbw=
+-----END CERTIFICATE-----
+
+# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Label: "Go Daddy Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67
+# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4
+# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4
+-----BEGIN CERTIFICATE-----
+MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh
+MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE
+YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3
+MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo
+ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg
+MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN
+ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA
+PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w
+wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi
+EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY
+avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+
+YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE
+sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h
+/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5
+IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD
+ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy
+OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P
+TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ
+HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER
+dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf
+ReYNnyicsbkqWletNw+vHX/bvZ8=
+-----END CERTIFICATE-----
+
+# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Subject: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Label: "Starfield Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24
+# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a
+# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58
+-----BEGIN CERTIFICATE-----
+MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl
+MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp
+U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw
+NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE
+ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp
+ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3
+DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf
+8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN
++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0
+X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa
+K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA
+1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G
+A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR
+zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0
+YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD
+bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w
+DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3
+L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D
+eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl
+xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp
+VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY
+WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root CA"
+# Serial: 17154717934120587862167794914071425081
+# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72
+# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43
+# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c
+-----BEGIN CERTIFICATE-----
+MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c
+JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP
+mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+
+wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4
+VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/
+AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB
+AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
+BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun
+pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC
+dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf
+fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm
+NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx
+H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe
++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root CA"
+# Serial: 10944719598952040374951832963794454346
+# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e
+# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36
+# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61
+-----BEGIN CERTIFICATE-----
+MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD
+QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB
+CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97
+nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt
+43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P
+T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4
+gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO
+BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR
+TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw
+DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr
+hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg
+06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF
+PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls
+YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk
+CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert High Assurance EV Root CA"
+# Serial: 3553400076410547919724730734378100087
+# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a
+# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25
+# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j
+ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL
+MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3
+LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug
+RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm
++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW
+PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM
+xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB
+Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3
+hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg
+EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA
+FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec
+nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z
+eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF
+hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2
+Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe
+vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep
++OkuE6N36B9K
+-----END CERTIFICATE-----
+
+# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG
+# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG
+# Label: "SwissSign Gold CA - G2"
+# Serial: 13492815561806991280
+# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93
+# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61
+# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95
+-----BEGIN CERTIFICATE-----
+MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV
+BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln
+biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF
+MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT
+d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC
+CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8
+76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+
+bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c
+6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE
+emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd
+MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt
+MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y
+MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y
+FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi
+aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM
+gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB
+qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7
+lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn
+8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov
+L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6
+45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO
+UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5
+O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC
+bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv
+GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a
+77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC
+hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3
+92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp
+Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w
+ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt
+Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ
+-----END CERTIFICATE-----
+
+# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG
+# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG
+# Label: "SwissSign Silver CA - G2"
+# Serial: 5700383053117599563
+# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13
+# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb
+# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5
+-----BEGIN CERTIFICATE-----
+MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE
+BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu
+IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow
+RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY
+U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A
+MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv
+Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br
+YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF
+nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH
+6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt
+eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/
+c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ
+MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH
+HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf
+jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6
+5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB
+rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU
+F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c
+wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0
+cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB
+AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp
+WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9
+xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ
+2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ
+IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8
+aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X
+em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR
+dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/
+OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+
+hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy
+tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureTrust CA O=SecureTrust Corporation
+# Subject: CN=SecureTrust CA O=SecureTrust Corporation
+# Label: "SecureTrust CA"
+# Serial: 17199774589125277788362757014266862032
+# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1
+# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11
+# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73
+-----BEGIN CERTIFICATE-----
+MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI
+MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
+FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz
+MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv
+cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN
+AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz
+Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO
+0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao
+wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj
+7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS
+8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT
+BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB
+/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg
+JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC
+NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3
+6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/
+3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm
+D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS
+CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR
+3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Secure Global CA O=SecureTrust Corporation
+# Subject: CN=Secure Global CA O=SecureTrust Corporation
+# Label: "Secure Global CA"
+# Serial: 9751836167731051554232119481456978597
+# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de
+# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b
+# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69
+-----BEGIN CERTIFICATE-----
+MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK
+MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
+GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx
+MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg
+Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ
+iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa
+/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ
+jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI
+HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7
+sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w
+gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw
+KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG
+AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L
+URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO
+H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm
+I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY
+iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc
+f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO Certification Authority O=COMODO CA Limited
+# Label: "COMODO Certification Authority"
+# Serial: 104350513648249232941998508985834464573
+# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75
+# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b
+# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66
+-----BEGIN CERTIFICATE-----
+MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB
+gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV
+BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw
+MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl
+YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P
+RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0
+aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3
+UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI
+2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8
+Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp
++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+
+DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O
+nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW
+/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g
+PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u
+QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY
+SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv
+IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/
+RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4
+zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd
+BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB
+ZQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
+# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
+# Label: "Network Solutions Certificate Authority"
+# Serial: 116697915152937497490437556386812487904
+# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e
+# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce
+# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c
+-----BEGIN CERTIFICATE-----
+MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi
+MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu
+MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp
+dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV
+UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO
+ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz
+c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP
+OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl
+mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF
+BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4
+qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw
+gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB
+BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu
+bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp
+dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8
+6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/
+h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH
+/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv
+wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN
+pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Label: "COMODO ECC Certification Authority"
+# Serial: 41578283867086692638256921589707938090
+# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23
+# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11
+# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7
+-----BEGIN CERTIFICATE-----
+MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT
+IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw
+MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy
+ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N
+T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv
+biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR
+FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J
+cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW
+BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm
+fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv
+GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certigna O=Dhimyotis
+# Subject: CN=Certigna O=Dhimyotis
+# Label: "Certigna"
+# Serial: 18364802974209362175
+# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff
+# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97
+# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d
+-----BEGIN CERTIFICATE-----
+MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV
+BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X
+DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ
+BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3
+DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4
+QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny
+gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw
+zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q
+130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2
+JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw
+DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw
+ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT
+AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj
+AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG
+9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h
+bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc
+fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu
+HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w
+t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw
+WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg==
+-----END CERTIFICATE-----
+
+# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
+# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
+# Label: "ePKI Root Certification Authority"
+# Serial: 28956088682735189655030529057352760477
+# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3
+# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0
+# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5
+-----BEGIN CERTIFICATE-----
+MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe
+MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0
+ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
+Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw
+IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL
+SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF
+AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH
+SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh
+ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X
+DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1
+TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ
+fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA
+sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU
+WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS
+nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH
+dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip
+NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC
+AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF
+MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH
+ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB
+uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl
+PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP
+JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/
+gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2
+j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6
+5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB
+o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS
+/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z
+Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE
+W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D
+hNQ+IIX3Sj0rnP0qCglN6oH4EZw=
+-----END CERTIFICATE-----
+
+# Issuer: O=certSIGN OU=certSIGN ROOT CA
+# Subject: O=certSIGN OU=certSIGN ROOT CA
+# Label: "certSIGN ROOT CA"
+# Serial: 35210227249154
+# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17
+# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b
+# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb
+-----BEGIN CERTIFICATE-----
+MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT
+AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD
+QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP
+MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC
+ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do
+0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ
+UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d
+RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ
+OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv
+JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C
+AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O
+BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ
+LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY
+MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ
+44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I
+Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw
+i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN
+9u6wWk5JRFRYX0KD
+-----END CERTIFICATE-----
+
+# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services)
+# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services)
+# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny"
+# Serial: 80544274841616
+# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88
+# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91
+# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98
+-----BEGIN CERTIFICATE-----
+MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG
+EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3
+MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl
+cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR
+dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB
+pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM
+b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm
+aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz
+IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
+MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT
+lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz
+AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5
+VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG
+ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2
+BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG
+AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M
+U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh
+bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C
++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC
+bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F
+uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2
+XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post
+# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post
+# Label: "Hongkong Post Root CA 1"
+# Serial: 1000
+# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca
+# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58
+# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2
+-----BEGIN CERTIFICATE-----
+MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx
+FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg
+Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG
+A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr
+b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ
+jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn
+PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh
+ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9
+nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h
+q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED
+MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC
+mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3
+7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB
+oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs
+EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO
+fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi
+AmvZWg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
+# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
+# Label: "SecureSign RootCA11"
+# Serial: 1
+# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26
+# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3
+# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12
+-----BEGIN CERTIFICATE-----
+MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr
+MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG
+A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0
+MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp
+Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD
+QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz
+i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8
+h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV
+MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9
+UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni
+8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC
+h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD
+VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB
+AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm
+KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ
+X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr
+QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5
+pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN
+QSdJQO7e5iNEOdyhIta6A/I=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
+# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
+# Label: "Microsec e-Szigno Root CA 2009"
+# Serial: 14014712776195784473
+# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1
+# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e
+# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78
+-----BEGIN CERTIFICATE-----
+MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD
+VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0
+ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G
+CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y
+OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx
+FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp
+Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o
+dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP
+kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc
+cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U
+fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7
+N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC
+xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1
++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G
+A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM
+Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG
+SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h
+mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk
+ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775
+tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c
+2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t
+HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Label: "GlobalSign Root CA - R3"
+# Serial: 4835703278459759426209954
+# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28
+# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad
+# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b
+-----BEGIN CERTIFICATE-----
+MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G
+A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp
+Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4
+MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG
+A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8
+RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT
+gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm
+KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd
+QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ
+XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw
+DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o
+LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU
+RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp
+jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK
+6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX
+mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs
+Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH
+WD9f
+-----END CERTIFICATE-----
+
+# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
+# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
+# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068"
+# Serial: 6047274297262753887
+# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3
+# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa
+# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef
+-----BEGIN CERTIFICATE-----
+MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE
+BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h
+cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy
+MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg
+Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9
+thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM
+cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG
+L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i
+NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h
+X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b
+m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy
+Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja
+EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T
+KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF
+6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh
+OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD
+VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD
+VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp
+cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv
+ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl
+AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF
+661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9
+am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1
+ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481
+PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS
+3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k
+SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF
+3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM
+ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g
+StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz
+Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB
+jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V
+-----END CERTIFICATE-----
+
+# Issuer: CN=Izenpe.com O=IZENPE S.A.
+# Subject: CN=Izenpe.com O=IZENPE S.A.
+# Label: "Izenpe.com"
+# Serial: 917563065490389241595536686991402621
+# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73
+# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19
+# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f
+-----BEGIN CERTIFICATE-----
+MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4
+MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6
+ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD
+VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j
+b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq
+scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO
+xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H
+LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX
+uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD
+yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+
+JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q
+rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN
+BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L
+hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB
+QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+
+HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu
+Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg
+QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB
+BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx
+MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA
+A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb
+laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56
+awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo
+JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw
+LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT
+VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk
+LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb
+UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/
+QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+
+naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls
+QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Label: "Go Daddy Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01
+# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b
+# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT
+EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp
+ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz
+NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH
+EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE
+AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw
+DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD
+E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH
+/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy
+DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh
+GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR
+tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA
+AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
+FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX
+WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu
+9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr
+gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo
+2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO
+LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI
+4uJEvlz36hz1
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96
+# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e
+# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5
+-----BEGIN CERTIFICATE-----
+MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs
+ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw
+MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6
+b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj
+aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp
+Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
+ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg
+nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1
+HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N
+Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN
+dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0
+HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G
+CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU
+sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3
+4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg
+8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K
+pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1
+mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Services Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2
+# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f
+# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5
+-----BEGIN CERTIFICATE-----
+MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs
+ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5
+MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD
+VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy
+ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy
+dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p
+OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2
+8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K
+Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe
+hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk
+6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw
+DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q
+AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI
+bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB
+ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z
+qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd
+iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn
+0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN
+sSi6
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Commercial O=AffirmTrust
+# Subject: CN=AffirmTrust Commercial O=AffirmTrust
+# Label: "AffirmTrust Commercial"
+# Serial: 8608355977964138876
+# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7
+# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7
+# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7
+-----BEGIN CERTIFICATE-----
+MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
+dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL
+MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
+cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP
+Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr
+ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL
+MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1
+yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr
+VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/
+nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
+KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG
+XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj
+vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt
+Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g
+N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC
+nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Networking O=AffirmTrust
+# Subject: CN=AffirmTrust Networking O=AffirmTrust
+# Label: "AffirmTrust Networking"
+# Serial: 8957382827206547757
+# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f
+# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f
+# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b
+-----BEGIN CERTIFICATE-----
+MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
+dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL
+MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
+cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y
+YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua
+kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL
+QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp
+6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG
+yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i
+QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
+KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO
+tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu
+QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ
+Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u
+olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48
+x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium O=AffirmTrust
+# Subject: CN=AffirmTrust Premium O=AffirmTrust
+# Label: "AffirmTrust Premium"
+# Serial: 7893706540734352110
+# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57
+# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27
+# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a
+-----BEGIN CERTIFICATE-----
+MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz
+dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG
+A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U
+cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf
+qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ
+JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ
++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS
+s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5
+HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7
+70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG
+V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S
+qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S
+5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia
+C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX
+OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE
+FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
+BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2
+KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg
+Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B
+8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ
+MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc
+0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF
+u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH
+YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8
+GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO
+RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e
+KeC2uAloGRwYQw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Label: "AffirmTrust Premium ECC"
+# Serial: 8401224907861490260
+# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d
+# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb
+# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23
+-----BEGIN CERTIFICATE-----
+MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC
+VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ
+cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ
+BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt
+VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D
+0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9
+ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G
+A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs
+aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I
+flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Label: "Certum Trusted Network CA"
+# Serial: 279744
+# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78
+# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e
+# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e
+-----BEGIN CERTIFICATE-----
+MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM
+MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D
+ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU
+cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3
+WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg
+Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw
+IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B
+AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH
+UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM
+TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU
+BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM
+kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x
+AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV
+HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y
+sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL
+I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8
+J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY
+VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI
+03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
+# Label: "TWCA Root Certification Authority"
+# Serial: 1
+# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79
+# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48
+# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44
+-----BEGIN CERTIFICATE-----
+MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES
+MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU
+V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz
+WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO
+LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
+AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE
+AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH
+K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX
+RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z
+rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx
+3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq
+hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC
+MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls
+XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D
+lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn
+aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ
+YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw==
+-----END CERTIFICATE-----
+
+# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
+# Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
+# Label: "Security Communication RootCA2"
+# Serial: 0
+# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43
+# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74
+# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl
+MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe
+U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX
+DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy
+dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj
+YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV
+OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr
+zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM
+VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ
+hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO
+ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw
+awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs
+OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3
+DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF
+coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc
+okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8
+t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy
+1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/
+SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03
+-----END CERTIFICATE-----
+
+# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
+# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
+# Label: "Actalis Authentication Root CA"
+# Serial: 6271844772424770508
+# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6
+# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac
+# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66
+-----BEGIN CERTIFICATE-----
+MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE
+BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w
+MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290
+IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC
+SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1
+ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv
+UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX
+4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9
+KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/
+gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb
+rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ
+51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F
+be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe
+KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F
+v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn
+fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7
+jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz
+ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt
+ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL
+e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70
+jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz
+WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V
+SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j
+pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX
+X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok
+fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R
+K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU
+ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU
+LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT
+LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
+# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
+# Label: "Buypass Class 2 Root CA"
+# Serial: 2
+# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29
+# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99
+# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48
+-----BEGIN CERTIFICATE-----
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
+Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow
+TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
+HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
+BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr
+6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV
+L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91
+1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx
+MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ
+QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB
+arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr
+Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi
+FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS
+P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN
+9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP
+AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz
+uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h
+9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s
+A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t
+OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo
++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7
+KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2
+DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us
+H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ
+I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7
+5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h
+3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz
+Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
+# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
+# Label: "Buypass Class 3 Root CA"
+# Serial: 2
+# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec
+# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57
+# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d
+-----BEGIN CERTIFICATE-----
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
+Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow
+TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
+HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
+BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y
+ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E
+N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9
+tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX
+0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c
+/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X
+KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY
+zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS
+O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D
+34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP
+K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3
+AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv
+Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj
+QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV
+cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS
+IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2
+HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa
+O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv
+033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u
+dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE
+kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41
+3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD
+u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq
+4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc=
+-----END CERTIFICATE-----
+
+# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Label: "T-TeleSec GlobalRoot Class 3"
+# Serial: 1
+# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef
+# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1
+# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd
+-----BEGIN CERTIFICATE-----
+MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
+KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
+BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
+YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1
+OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
+ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN
+8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/
+RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4
+hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5
+ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM
+EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1
+A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy
+WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ
+1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30
+6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT
+91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml
+e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p
+TpPDpFQUWw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
+# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
+# Label: "D-TRUST Root Class 3 CA 2 2009"
+# Serial: 623603
+# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f
+# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0
+# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1
+-----BEGIN CERTIFICATE-----
+MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF
+MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD
+bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha
+ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM
+HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03
+UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42
+tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R
+ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM
+lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp
+/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G
+A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G
+A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj
+dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy
+MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl
+cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js
+L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL
+BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni
+acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0
+o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K
+zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8
+PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y
+Johw1+qRzT65ysCQblrGXnRl11z+o+I=
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
+# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
+# Label: "D-TRUST Root Class 3 CA 2 EV 2009"
+# Serial: 623604
+# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6
+# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83
+# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81
+-----BEGIN CERTIFICATE-----
+MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF
+MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD
+bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw
+NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV
+BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn
+ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0
+3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z
+qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR
+p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8
+HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw
+ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea
+HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw
+Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh
+c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E
+RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt
+dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku
+Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp
+3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05
+nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF
+CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na
+xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX
+KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1
+-----END CERTIFICATE-----
+
+# Issuer: CN=CA Disig Root R2 O=Disig a.s.
+# Subject: CN=CA Disig Root R2 O=Disig a.s.
+# Label: "CA Disig Root R2"
+# Serial: 10572350602393338211
+# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03
+# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71
+# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03
+-----BEGIN CERTIFICATE-----
+MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV
+BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu
+MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy
+MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx
+EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw
+ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe
+NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH
+PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I
+x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe
+QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR
+yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO
+QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912
+H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ
+QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD
+i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs
+nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1
+rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud
+DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI
+hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM
+tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf
+GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb
+lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka
++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal
+TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i
+nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3
+gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr
+G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os
+zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x
+L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL
+-----END CERTIFICATE-----
+
+# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
+# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
+# Label: "ACCVRAIZ1"
+# Serial: 6828503384748696800
+# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02
+# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17
+# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13
+-----BEGIN CERTIFICATE-----
+MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE
+AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw
+CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ
+BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND
+VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb
+qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY
+HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo
+G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA
+lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr
+IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/
+0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH
+k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47
+4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO
+m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa
+cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl
+uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI
+KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls
+ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG
+AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2
+VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT
+VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG
+CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA
+cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA
+QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA
+7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA
+cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA
+QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA
+czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu
+aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt
+aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud
+DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF
+BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp
+D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU
+JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m
+AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD
+vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms
+tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH
+7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h
+I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA
+h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF
+d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H
+pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
+# Label: "TWCA Global Root CA"
+# Serial: 3262
+# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96
+# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65
+# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b
+-----BEGIN CERTIFICATE-----
+MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx
+EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT
+VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5
+NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT
+B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF
+10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz
+0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh
+MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH
+zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc
+46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2
+yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi
+laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP
+oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA
+BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE
+qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm
+4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
+/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL
+1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn
+LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF
+H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo
+RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+
+nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh
+15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW
+6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW
+nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j
+wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz
+aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy
+KwbQBM0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera
+# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera
+# Label: "TeliaSonera Root CA v1"
+# Serial: 199041966741090107964904287217786801558
+# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c
+# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37
+# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89
+-----BEGIN CERTIFICATE-----
+MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw
+NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv
+b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD
+VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2
+MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F
+VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1
+7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X
+Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+
+/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs
+81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm
+dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe
+Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu
+sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4
+pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs
+slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ
+arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD
+VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG
+9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl
+dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx
+0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj
+TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed
+Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7
+Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI
+OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7
+vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW
+t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn
+HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx
+SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi
+# Subject: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi
+# Label: "E-Tugra Certification Authority"
+# Serial: 7667447206703254355
+# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49
+# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39
+# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c
+-----BEGIN CERTIFICATE-----
+MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV
+BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC
+aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV
+BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1
+Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz
+MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+
+BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp
+em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN
+ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY
+B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH
+D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF
+Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo
+q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D
+k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH
+fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut
+dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM
+ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8
+zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn
+rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX
+U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6
+Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5
+XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF
+Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR
+HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY
+GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c
+77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3
++GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK
+vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6
+FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl
+yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P
+AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD
+y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d
+NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Label: "T-TeleSec GlobalRoot Class 2"
+# Serial: 1
+# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a
+# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9
+# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52
+-----BEGIN CERTIFICATE-----
+MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
+KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
+BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
+YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1
+OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
+ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd
+AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC
+FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi
+1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq
+jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ
+wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/
+WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy
+NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC
+uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw
+IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6
+g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN
+9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP
+BSeOE6Fuwg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Atos TrustedRoot 2011 O=Atos
+# Subject: CN=Atos TrustedRoot 2011 O=Atos
+# Label: "Atos TrustedRoot 2011"
+# Serial: 6643877497813316402
+# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56
+# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21
+# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE
+AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG
+EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM
+FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC
+REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp
+Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM
+VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+
+SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ
+4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L
+cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi
+eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV
+HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG
+A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3
+DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j
+vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP
+DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc
+maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D
+lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv
+KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 1 G3"
+# Serial: 687049649626669250736271037606554624078720034195
+# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab
+# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67
+# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00
+MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV
+wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe
+rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341
+68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh
+4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp
+UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o
+abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc
+3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G
+KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt
+hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO
+Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt
+zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD
+ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC
+MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2
+cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN
+qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5
+YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv
+b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2
+8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k
+NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj
+ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp
+q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt
+nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 2 G3"
+# Serial: 390156079458959257446133169266079962026824725800
+# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06
+# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36
+# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00
+MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf
+qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW
+n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym
+c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+
+O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1
+o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j
+IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq
+IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz
+8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh
+vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l
+7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG
+cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD
+ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66
+AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC
+roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga
+W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n
+lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE
++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV
+csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd
+dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg
+KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM
+HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4
+WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 3 G3"
+# Serial: 268090761170461462463995952157327242137089239581
+# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7
+# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d
+# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00
+MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR
+/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu
+FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR
+U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c
+ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR
+FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k
+A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw
+eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl
+sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp
+VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q
+A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+
+ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD
+ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px
+KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI
+FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv
+oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg
+u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP
+0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf
+3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl
+8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+
+DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN
+PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/
+ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root G2"
+# Serial: 15385348160840213938643033620894905419
+# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d
+# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f
+# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85
+-----BEGIN CERTIFICATE-----
+MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA
+n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc
+biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp
+EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA
+bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu
+YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB
+AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW
+BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI
+QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I
+0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni
+lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9
+B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv
+ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo
+IhNzbM8m9Yop5w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root G3"
+# Serial: 15459312981008553731928384953135426796
+# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb
+# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89
+# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2
+-----BEGIN CERTIFICATE-----
+MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw
+CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
+ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg
+RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV
+UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
+Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq
+hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf
+Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q
+RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
+BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD
+AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY
+JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv
+6pZjamVFkpUBtA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root G2"
+# Serial: 4293743540046975378534879503202253541
+# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44
+# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4
+# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f
+-----BEGIN CERTIFICATE-----
+MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH
+MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI
+2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx
+1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ
+q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz
+tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ
+vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP
+BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV
+5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY
+1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4
+NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG
+Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91
+8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe
+pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl
+MrY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root G3"
+# Serial: 7089244469030293291760083333884364146
+# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca
+# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e
+# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0
+-----BEGIN CERTIFICATE-----
+MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw
+CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
+ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe
+Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw
+EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x
+IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF
+K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG
+fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO
+Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd
+BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx
+AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/
+oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8
+sycX
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Trusted Root G4"
+# Serial: 7451500558977370777930084869016614236
+# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49
+# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4
+# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88
+-----BEGIN CERTIFICATE-----
+MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg
+RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV
+UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
+Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y
+ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If
+xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV
+ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO
+DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ
+jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/
+CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi
+EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM
+fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY
+uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK
+chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t
+9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD
+ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2
+SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd
++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc
+fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa
+sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N
+cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N
+0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie
+4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI
+r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1
+/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm
+gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited
+# Label: "COMODO RSA Certification Authority"
+# Serial: 101909084537582093308941363524873193117
+# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18
+# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4
+# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34
+-----BEGIN CERTIFICATE-----
+MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB
+hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV
+BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5
+MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT
+EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR
+Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh
+dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR
+6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X
+pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC
+9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV
+/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf
+Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z
++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w
+qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah
+SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC
+u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf
+Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq
+crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E
+FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB
+/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl
+wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM
+4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV
+2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna
+FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ
+CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK
+boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke
+jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL
+S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb
+QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl
+0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB
+NVOFBkpdn627G190
+-----END CERTIFICATE-----
+
+# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
+# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
+# Label: "USERTrust RSA Certification Authority"
+# Serial: 2645093764781058787591871645665788717
+# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5
+# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e
+# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2
+-----BEGIN CERTIFICATE-----
+MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB
+iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl
+cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV
+BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw
+MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV
+BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU
+aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy
+dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B
+3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY
+tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/
+Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2
+VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT
+79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6
+c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT
+Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l
+c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee
+UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE
+Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd
+BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G
+A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF
+Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO
+VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3
+ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs
+8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR
+iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze
+Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ
+XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/
+qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB
+VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB
+L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG
+jjxDah2nGN59PRbxYvnKkKj9
+-----END CERTIFICATE-----
+
+# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
+# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
+# Label: "USERTrust ECC Certification Authority"
+# Serial: 123013823720199481456569720443997572134
+# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1
+# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0
+# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a
+-----BEGIN CERTIFICATE-----
+MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL
+MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl
+eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT
+JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx
+MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT
+Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg
+VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo
+I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng
+o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G
+A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB
+zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW
+RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
+# Label: "GlobalSign ECC Root CA - R5"
+# Serial: 32785792099990507226680698011560947931244
+# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08
+# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa
+# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24
+-----BEGIN CERTIFICATE-----
+MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk
+MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH
+bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX
+DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD
+QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu
+MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc
+8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke
+hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI
+KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg
+515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO
+xwy8p2Fp8fc74SrL+SvzZpA3
+-----END CERTIFICATE-----
+
+# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden
+# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden
+# Label: "Staat der Nederlanden EV Root CA"
+# Serial: 10000013
+# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba
+# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb
+# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a
+-----BEGIN CERTIFICATE-----
+MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO
+TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh
+dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y
+MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg
+TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS
+b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS
+M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC
+UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d
+Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p
+rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l
+pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb
+j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC
+KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS
+/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X
+cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH
+1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP
+px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB
+/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7
+MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI
+eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u
+2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS
+v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC
+wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy
+CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e
+vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6
+Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa
+Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL
+eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8
+FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc
+7uzXLg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
+# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
+# Label: "IdenTrust Commercial Root CA 1"
+# Serial: 13298821034946342390520003877796839426
+# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7
+# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25
+# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu
+VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw
+MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw
+JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT
+3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU
++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp
+S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1
+bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi
+T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL
+vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK
+Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK
+dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT
+c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv
+l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N
+iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
+/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD
+ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH
+6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt
+LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93
+nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3
++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK
+W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT
+AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq
+l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG
+4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ
+mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A
+7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H
+-----END CERTIFICATE-----
+
+# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
+# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
+# Label: "IdenTrust Public Sector Root CA 1"
+# Serial: 13298821034946342390521976156843933698
+# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba
+# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd
+# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f
+-----BEGIN CERTIFICATE-----
+MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu
+VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN
+MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0
+MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7
+ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy
+RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS
+bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF
+/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R
+3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw
+EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy
+9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V
+GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ
+2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV
+WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD
+W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN
+AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj
+t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV
+DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9
+TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G
+lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW
+mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df
+WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5
++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ
+tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA
+GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv
+8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - G2"
+# Serial: 1246989352
+# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2
+# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4
+# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39
+-----BEGIN CERTIFICATE-----
+MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50
+cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs
+IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz
+dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy
+NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu
+dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt
+dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0
+aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
+AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T
+RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN
+cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW
+wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1
+U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0
+jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP
+BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN
+BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/
+jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ
+Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v
+1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R
+nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH
+VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - EC1"
+# Serial: 51543124481930649114116133369
+# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc
+# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47
+# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5
+-----BEGIN CERTIFICATE-----
+MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG
+A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3
+d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu
+dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq
+RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy
+MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD
+VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0
+L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g
+Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD
+ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi
+A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt
+ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH
+Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O
+BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC
+R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX
+hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G
+-----END CERTIFICATE-----
+
+# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority
+# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority
+# Label: "CFCA EV ROOT"
+# Serial: 407555286
+# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30
+# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83
+# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd
+-----BEGIN CERTIFICATE-----
+MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD
+TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx
+MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j
+aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP
+T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03
+sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL
+TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5
+/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp
+7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz
+EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt
+hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP
+a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot
+aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg
+TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV
+PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv
+cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL
+tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd
+BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB
+ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT
+ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL
+jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS
+ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy
+P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19
+xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d
+Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN
+5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe
+/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z
+AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ
+5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su
+-----END CERTIFICATE-----
+
+# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed
+# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed
+# Label: "OISTE WISeKey Global Root GB CA"
+# Serial: 157768595616588414422159278966750757568
+# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d
+# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed
+# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6
+-----BEGIN CERTIFICATE-----
+MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt
+MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg
+Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i
+YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x
+CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG
+b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh
+bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3
+HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx
+WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX
+1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk
+u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P
+99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r
+M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw
+AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB
+BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh
+cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5
+gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO
+ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf
+aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic
+Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A.
+# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A.
+# Label: "SZAFIR ROOT CA2"
+# Serial: 357043034767186914217277344587386743377558296292
+# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99
+# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de
+# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe
+-----BEGIN CERTIFICATE-----
+MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL
+BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6
+ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw
+NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L
+cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg
+Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN
+QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT
+3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw
+3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6
+3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5
+BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN
+XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
+AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF
+AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw
+8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG
+nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP
+oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy
+d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg
+LvWpCz/UXeHPhJ/iGcJfitYgHuNztw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Label: "Certum Trusted Network CA 2"
+# Serial: 44979900017204383099463764357512596969
+# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2
+# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92
+# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04
+-----BEGIN CERTIFICATE-----
+MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB
+gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu
+QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG
+A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz
+OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ
+VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3
+b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA
+DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn
+0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB
+OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE
+fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E
+Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m
+o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i
+sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW
+OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez
+Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS
+adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n
+3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD
+AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC
+AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ
+F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf
+CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29
+XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm
+djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/
+WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb
+AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq
+P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko
+b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj
+XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P
+5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi
+DrW5viSP
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions RootCA 2015"
+# Serial: 0
+# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce
+# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6
+# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36
+-----BEGIN CERTIFICATE-----
+MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix
+DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k
+IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT
+N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v
+dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG
+A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh
+ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx
+QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1
+dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC
+AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA
+4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0
+AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10
+4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C
+ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV
+9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD
+gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6
+Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq
+NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko
+LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc
+Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV
+HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd
+ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I
+XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI
+M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot
+9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V
+Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea
+j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh
+X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ
+l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf
+bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4
+pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK
+e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0
+vm9qp/UsQu0yrbYhnr68
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015"
+# Serial: 0
+# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef
+# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66
+# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33
+-----BEGIN CERTIFICATE-----
+MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN
+BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl
+c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl
+bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv
+b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ
+BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj
+YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5
+MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0
+dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg
+QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa
+jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC
+MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi
+C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep
+lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof
+TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR
+-----END CERTIFICATE-----
+
+# Issuer: CN=ISRG Root X1 O=Internet Security Research Group
+# Subject: CN=ISRG Root X1 O=Internet Security Research Group
+# Label: "ISRG Root X1"
+# Serial: 172886928669790476064670243504169061120
+# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e
+# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8
+# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6
+-----BEGIN CERTIFICATE-----
+MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw
+TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh
+cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4
+WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu
+ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY
+MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc
+h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+
+0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U
+A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW
+T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH
+B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC
+B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv
+KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn
+OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn
+jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw
+qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI
+rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq
+hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL
+ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ
+3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK
+NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5
+ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur
+TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC
+jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc
+oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq
+4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA
+mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d
+emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc=
+-----END CERTIFICATE-----
+
+# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM
+# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM
+# Label: "AC RAIZ FNMT-RCM"
+# Serial: 485876308206448804701554682760554759
+# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d
+# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20
+# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa
+-----BEGIN CERTIFICATE-----
+MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx
+CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ
+WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ
+BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG
+Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/
+yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf
+BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz
+WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF
+tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z
+374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC
+IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL
+mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7
+wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS
+MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2
+ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet
+UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw
+AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H
+YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3
+LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD
+nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1
+RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM
+LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf
+77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N
+JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm
+fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp
+6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp
+1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B
+9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok
+RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv
+uu8wd+RU4riEmViAqhOLUTpPSPaLtrM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 1 O=Amazon
+# Subject: CN=Amazon Root CA 1 O=Amazon
+# Label: "Amazon Root CA 1"
+# Serial: 143266978916655856878034712317230054538369994
+# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6
+# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16
+# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e
+-----BEGIN CERTIFICATE-----
+MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF
+ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6
+b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL
+MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv
+b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj
+ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM
+9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw
+IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6
+VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L
+93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm
+jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA
+A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI
+U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs
+N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv
+o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU
+5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy
+rqXRfboQnoZsG4q5WTP468SQvvG5
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 2 O=Amazon
+# Subject: CN=Amazon Root CA 2 O=Amazon
+# Label: "Amazon Root CA 2"
+# Serial: 143266982885963551818349160658925006970653239
+# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66
+# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a
+# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4
+-----BEGIN CERTIFICATE-----
+MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF
+ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6
+b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL
+MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv
+b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK
+gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ
+W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg
+1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K
+8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r
+2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me
+z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR
+8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj
+mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz
+7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6
++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI
+0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB
+Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm
+UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2
+LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY
++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS
+k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl
+7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm
+btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl
+urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+
+fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63
+n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE
+76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H
+9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT
+4PsJYGw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 3 O=Amazon
+# Subject: CN=Amazon Root CA 3 O=Amazon
+# Label: "Amazon Root CA 3"
+# Serial: 143266986699090766294700635381230934788665930
+# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87
+# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e
+# SHA256 Fingerprint: 18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4
+-----BEGIN CERTIFICATE-----
+MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5
+MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g
+Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG
+A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg
+Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl
+ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr
+ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr
+BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM
+YyRIHN8wfdVoOw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 4 O=Amazon
+# Subject: CN=Amazon Root CA 4 O=Amazon
+# Label: "Amazon Root CA 4"
+# Serial: 143266989758080763974105200630763877849284878
+# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd
+# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be
+# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92
+-----BEGIN CERTIFICATE-----
+MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5
+MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g
+Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG
+A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg
+Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi
+9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk
+M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB
+/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB
+MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw
+CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW
+1KyLa2tJElMzrdfkviT8tQp21KW8EA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM
+# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM
+# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1"
+# Serial: 1
+# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49
+# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca
+# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16
+-----BEGIN CERTIFICATE-----
+MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx
+GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp
+bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w
+KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0
+BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy
+dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG
+EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll
+IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU
+QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT
+TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg
+LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7
+a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr
+LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr
+N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X
+YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/
+iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f
+AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH
+V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL
+BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh
+AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf
+IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4
+lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c
+8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf
+lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD.
+# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD.
+# Label: "GDCA TrustAUTH R5 ROOT"
+# Serial: 9009899650740120186
+# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4
+# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4
+# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93
+-----BEGIN CERTIFICATE-----
+MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE
+BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ
+IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0
+MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV
+BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w
+HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF
+AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj
+Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj
+TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u
+KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj
+qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm
+MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12
+ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP
+zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk
+L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC
+jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA
+HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC
+AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB
+/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg
+p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm
+DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5
+COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry
+L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf
+JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg
+IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io
+2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV
+09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ
+XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq
+T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe
+MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Subject: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Label: "TrustCor RootCert CA-1"
+# Serial: 15752444095811006489
+# MD5 Fingerprint: 6e:85:f1:dc:1a:00:d3:22:d5:b2:b2:ac:6b:37:05:45
+# SHA1 Fingerprint: ff:bd:cd:e7:82:c8:43:5e:3c:6f:26:86:5c:ca:a8:3a:45:5b:c3:0a
+# SHA256 Fingerprint: d4:0e:9c:86:cd:8f:e4:68:c1:77:69:59:f4:9e:a7:74:fa:54:86:84:b6:c4:06:f3:90:92:61:f4:dc:e2:57:5c
+-----BEGIN CERTIFICATE-----
+MIIEMDCCAxigAwIBAgIJANqb7HHzA7AZMA0GCSqGSIb3DQEBCwUAMIGkMQswCQYD
+VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk
+MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U
+cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRydXN0Q29y
+IFJvb3RDZXJ0IENBLTEwHhcNMTYwMjA0MTIzMjE2WhcNMjkxMjMxMTcyMzE2WjCB
+pDELMAkGA1UEBhMCUEExDzANBgNVBAgMBlBhbmFtYTEUMBIGA1UEBwwLUGFuYW1h
+IENpdHkxJDAiBgNVBAoMG1RydXN0Q29yIFN5c3RlbXMgUy4gZGUgUi5MLjEnMCUG
+A1UECwweVHJ1c3RDb3IgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MR8wHQYDVQQDDBZU
+cnVzdENvciBSb290Q2VydCBDQS0xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB
+CgKCAQEAv463leLCJhJrMxnHQFgKq1mqjQCj/IDHUHuO1CAmujIS2CNUSSUQIpid
+RtLByZ5OGy4sDjjzGiVoHKZaBeYei0i/mJZ0PmnK6bV4pQa81QBeCQryJ3pS/C3V
+seq0iWEk8xoT26nPUu0MJLq5nux+AHT6k61sKZKuUbS701e/s/OojZz0JEsq1pme
+9J7+wH5COucLlVPat2gOkEz7cD+PSiyU8ybdY2mplNgQTsVHCJCZGxdNuWxu72CV
+EY4hgLW9oHPY0LJ3xEXqWib7ZnZ2+AYfYW0PVcWDtxBWcgYHpfOxGgMFZA6dWorW
+hnAbJN7+KIor0Gqw/Hqi3LJ5DotlDwIDAQABo2MwYTAdBgNVHQ4EFgQU7mtJPHo/
+DeOxCbeKyKsZn3MzUOcwHwYDVR0jBBgwFoAU7mtJPHo/DeOxCbeKyKsZn3MzUOcw
+DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD
+ggEBACUY1JGPE+6PHh0RU9otRCkZoB5rMZ5NDp6tPVxBb5UrJKF5mDo4Nvu7Zp5I
+/5CQ7z3UuJu0h3U/IJvOcs+hVcFNZKIZBqEHMwwLKeXx6quj7LUKdJDHfXLy11yf
+ke+Ri7fc7Waiz45mO7yfOgLgJ90WmMCV1Aqk5IGadZQ1nJBfiDcGrVmVCrDRZ9MZ
+yonnMlo2HD6CqFqTvsbQZJG2z9m2GM/bftJlo6bEjhcxwft+dtvTheNYsnd6djts
+L1Ac59v2Z3kf9YKVmgenFK+P3CghZwnS1k1aHBkcjndcw5QkPTJrS37UeJSDvjdN
+zl/HHk484IkzlQsPpTLWPFp5LBk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Subject: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Label: "TrustCor RootCert CA-2"
+# Serial: 2711694510199101698
+# MD5 Fingerprint: a2:e1:f8:18:0b:ba:45:d5:c7:41:2a:bb:37:52:45:64
+# SHA1 Fingerprint: b8:be:6d:cb:56:f1:55:b9:63:d4:12:ca:4e:06:34:c7:94:b2:1c:c0
+# SHA256 Fingerprint: 07:53:e9:40:37:8c:1b:d5:e3:83:6e:39:5d:ae:a5:cb:83:9e:50:46:f1:bd:0e:ae:19:51:cf:10:fe:c7:c9:65
+-----BEGIN CERTIFICATE-----
+MIIGLzCCBBegAwIBAgIIJaHfyjPLWQIwDQYJKoZIhvcNAQELBQAwgaQxCzAJBgNV
+BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw
+IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy
+dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEfMB0GA1UEAwwWVHJ1c3RDb3Ig
+Um9vdENlcnQgQ0EtMjAeFw0xNjAyMDQxMjMyMjNaFw0zNDEyMzExNzI2MzlaMIGk
+MQswCQYDVQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEg
+Q2l0eTEkMCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYD
+VQQLDB5UcnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRy
+dXN0Q29yIFJvb3RDZXJ0IENBLTIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCnIG7CKqJiJJWQdsg4foDSq8GbZQWU9MEKENUCrO2fk8eHyLAnK0IMPQo+
+QVqedd2NyuCb7GgypGmSaIwLgQ5WoD4a3SwlFIIvl9NkRvRUqdw6VC0xK5mC8tkq
+1+9xALgxpL56JAfDQiDyitSSBBtlVkxs1Pu2YVpHI7TYabS3OtB0PAx1oYxOdqHp
+2yqlO/rOsP9+aij9JxzIsekp8VduZLTQwRVtDr4uDkbIXvRR/u8OYzo7cbrPb1nK
+DOObXUm4TOJXsZiKQlecdu/vvdFoqNL0Cbt3Nb4lggjEFixEIFapRBF37120Hape
+az6LMvYHL1cEksr1/p3C6eizjkxLAjHZ5DxIgif3GIJ2SDpxsROhOdUuxTTCHWKF
+3wP+TfSvPd9cW436cOGlfifHhi5qjxLGhF5DUVCcGZt45vz27Ud+ez1m7xMTiF88
+oWP7+ayHNZ/zgp6kPwqcMWmLmaSISo5uZk3vFsQPeSghYA2FFn3XVDjxklb9tTNM
+g9zXEJ9L/cb4Qr26fHMC4P99zVvh1Kxhe1fVSntb1IVYJ12/+CtgrKAmrhQhJ8Z3
+mjOAPF5GP/fDsaOGM8boXg25NSyqRsGFAnWAoOsk+xWq5Gd/bnc/9ASKL3x74xdh
+8N0JqSDIvgmk0H5Ew7IwSjiqqewYmgeCK9u4nBit2uBGF6zPXQIDAQABo2MwYTAd
+BgNVHQ4EFgQU2f4hQG6UnrybPZx9mCAZ5YwwYrIwHwYDVR0jBBgwFoAU2f4hQG6U
+nrybPZx9mCAZ5YwwYrIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYw
+DQYJKoZIhvcNAQELBQADggIBAJ5Fngw7tu/hOsh80QA9z+LqBrWyOrsGS2h60COX
+dKcs8AjYeVrXWoSK2BKaG9l9XE1wxaX5q+WjiYndAfrs3fnpkpfbsEZC89NiqpX+
+MWcUaViQCqoL7jcjx1BRtPV+nuN79+TMQjItSQzL/0kMmx40/W5ulop5A7Zv2wnL
+/V9lFDfhOPXzYRZY5LVtDQsEGz9QLX+zx3oaFoBg+Iof6Rsqxvm6ARppv9JYx1RX
+CI/hOWB3S6xZhBqI8d3LT3jX5+EzLfzuQfogsL7L9ziUwOHQhQ+77Sxzq+3+knYa
+ZH9bDTMJBzN7Bj8RpFxwPIXAz+OQqIN3+tvmxYxoZxBnpVIt8MSZj3+/0WvitUfW
+2dCFmU2Umw9Lje4AWkcdEQOsQRivh7dvDDqPys/cA8GiCcjl/YBeyGBCARsaU1q7
+N6a3vLqE6R5sGtRk2tRD/pOLS/IseRYQ1JMLiI+h2IYURpFHmygk71dSTlxCnKr3
+Sewn6EAes6aJInKc9Q0ztFijMDvd1GpUk74aTfOTlPf8hAs/hCBcNANExdqtvArB
+As8e5ZTZ845b2EzwnexhF7sUMlQMAimTHpKG9n/v55IFDlndmQguLvqcAFLTxWYp
+5KeXRKQOKIETNcX2b2TmQcTVL8w0RSXPQQCWPUouwpaYT05KnJe32x+SMsj/D1Fu
+1uwJ
+-----END CERTIFICATE-----
+
+# Issuer: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Subject: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Label: "TrustCor ECA-1"
+# Serial: 9548242946988625984
+# MD5 Fingerprint: 27:92:23:1d:0a:f5:40:7c:e9:e6:6b:9d:d8:f5:e7:6c
+# SHA1 Fingerprint: 58:d1:df:95:95:67:6b:63:c0:f0:5b:1c:17:4d:8b:84:0b:c8:78:bd
+# SHA256 Fingerprint: 5a:88:5d:b1:9c:01:d9:12:c5:75:93:88:93:8c:af:bb:df:03:1a:b2:d4:8e:91:ee:15:58:9b:42:97:1d:03:9c
+-----BEGIN CERTIFICATE-----
+MIIEIDCCAwigAwIBAgIJAISCLF8cYtBAMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYD
+VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk
+MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U
+cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFzAVBgNVBAMMDlRydXN0Q29y
+IEVDQS0xMB4XDTE2MDIwNDEyMzIzM1oXDTI5MTIzMTE3MjgwN1owgZwxCzAJBgNV
+BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw
+IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy
+dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEXMBUGA1UEAwwOVHJ1c3RDb3Ig
+RUNBLTEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPj+ARtZ+odnbb
+3w9U73NjKYKtR8aja+3+XzP4Q1HpGjORMRegdMTUpwHmspI+ap3tDvl0mEDTPwOA
+BoJA6LHip1GnHYMma6ve+heRK9jGrB6xnhkB1Zem6g23xFUfJ3zSCNV2HykVh0A5
+3ThFEXXQmqc04L/NyFIduUd+Dbi7xgz2c1cWWn5DkR9VOsZtRASqnKmcp0yJF4Ou
+owReUoCLHhIlERnXDH19MURB6tuvsBzvgdAsxZohmz3tQjtQJvLsznFhBmIhVE5/
+wZ0+fyCMgMsq2JdiyIMzkX2woloPV+g7zPIlstR8L+xNxqE6FXrntl019fZISjZF
+ZtS6mFjBAgMBAAGjYzBhMB0GA1UdDgQWBBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAf
+BgNVHSMEGDAWgBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAPBgNVHRMBAf8EBTADAQH/
+MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAQEABT41XBVwm8nHc2Fv
+civUwo/yQ10CzsSUuZQRg2dd4mdsdXa/uwyqNsatR5Nj3B5+1t4u/ukZMjgDfxT2
+AHMsWbEhBuH7rBiVDKP/mZb3Kyeb1STMHd3BOuCYRLDE5D53sXOpZCz2HAF8P11F
+hcCF5yWPldwX8zyfGm6wyuMdKulMY/okYWLW2n62HGz1Ah3UKt1VkOsqEUc8Ll50
+soIipX1TH0XsJ5F95yIW6MBoNtjG8U+ARDL54dHRHareqKucBK+tIA5kmE2la8BI
+WJZpTdwHjFGTot+fDz2LYLSCjaoITmJF4PkL0uDgPFveXHEnJcLmA4GLEFPjx1Wi
+tJ/X5g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation
+# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation
+# Label: "SSL.com Root Certification Authority RSA"
+# Serial: 8875640296558310041
+# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29
+# SHA1 Fingerprint: b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb
+# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69
+-----BEGIN CERTIFICATE-----
+MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE
+BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK
+DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz
+OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv
+dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv
+bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN
+AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R
+xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX
+qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC
+C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3
+6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh
+/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF
+YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E
+JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc
+US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8
+ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm
++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi
+M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV
+HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G
+A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV
+cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc
+Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs
+PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/
+q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0
+cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr
+a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I
+H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y
+K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu
+nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf
+oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY
+Ic2wBlX7Jz9TkHCpBB5XJ7k=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation
+# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation
+# Label: "SSL.com Root Certification Authority ECC"
+# Serial: 8495723813297216424
+# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e
+# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a
+# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65
+-----BEGIN CERTIFICATE-----
+MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T
+U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0
+aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz
+WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0
+b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS
+b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB
+BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI
+7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg
+CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud
+EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD
+VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T
+kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+
+gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation
+# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation
+# Label: "SSL.com EV Root Certification Authority RSA R2"
+# Serial: 6248227494352943350
+# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95
+# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a
+# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c
+-----BEGIN CERTIFICATE-----
+MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV
+BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE
+CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy
+dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy
+MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G
+A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD
+DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq
+M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf
+OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa
+4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9
+HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR
+aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA
+b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ
+Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV
+PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO
+pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu
+UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY
+MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV
+HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4
+9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW
+s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5
+Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg
+cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM
+79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz
+/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt
+ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm
+Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK
+QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ
+w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi
+S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07
+mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation
+# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation
+# Label: "SSL.com EV Root Certification Authority ECC"
+# Serial: 3182246526754555285
+# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90
+# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d
+# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8
+-----BEGIN CERTIFICATE-----
+MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T
+U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx
+NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv
+dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv
+bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49
+AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA
+VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku
+WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP
+MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX
+5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ
+ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg
+h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6
+# Label: "GlobalSign Root CA - R6"
+# Serial: 1417766617973444989252670301619537
+# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae
+# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1
+# SHA256 Fingerprint: 2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69
+-----BEGIN CERTIFICATE-----
+MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg
+MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh
+bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx
+MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET
+MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ
+KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI
+xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k
+ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD
+aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw
+LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw
+1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX
+k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2
+SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h
+bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n
+WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY
+rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce
+MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD
+AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu
+bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN
+nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt
+Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61
+55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj
+vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf
+cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz
+oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp
+nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs
+pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v
+JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R
+8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4
+5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed
+# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed
+# Label: "OISTE WISeKey Global Root GC CA"
+# Serial: 44084345621038548146064804565436152554
+# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23
+# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31
+# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d
+-----BEGIN CERTIFICATE-----
+MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw
+CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91
+bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg
+Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ
+BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu
+ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS
+b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni
+eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W
+p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E
+BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T
+rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV
+57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg
+Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9
+-----END CERTIFICATE-----
+
+# Issuer: CN=UCA Global G2 Root O=UniTrust
+# Subject: CN=UCA Global G2 Root O=UniTrust
+# Label: "UCA Global G2 Root"
+# Serial: 124779693093741543919145257850076631279
+# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8
+# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a
+# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c
+-----BEGIN CERTIFICATE-----
+MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9
+MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH
+bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x
+CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds
+b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr
+b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9
+kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm
+VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R
+VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc
+C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj
+tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY
+D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv
+j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl
+NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6
+iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP
+O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/
+BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV
+ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj
+L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5
+1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl
+1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU
+b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV
+PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj
+y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb
+EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg
+DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI
++Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy
+YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX
+UB+K+wb1whnw0A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=UCA Extended Validation Root O=UniTrust
+# Subject: CN=UCA Extended Validation Root O=UniTrust
+# Label: "UCA Extended Validation Root"
+# Serial: 106100277556486529736699587978573607008
+# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2
+# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a
+# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24
+-----BEGIN CERTIFICATE-----
+MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH
+MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF
+eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx
+MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV
+BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog
+D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS
+sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop
+O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk
+sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi
+c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj
+VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz
+KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/
+TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G
+sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs
+1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD
+fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T
+AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN
+l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR
+ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ
+VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5
+c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp
+4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s
+t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj
+2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO
+vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C
+xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx
+cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM
+fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036
+# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036
+# Label: "Certigna Root CA"
+# Serial: 269714418870597844693661054334862075617
+# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77
+# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43
+# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68
+-----BEGIN CERTIFICATE-----
+MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw
+WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw
+MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x
+MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD
+VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX
+BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw
+ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO
+ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M
+CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu
+I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm
+TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh
+C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf
+ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz
+IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT
+Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k
+JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5
+hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB
+GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
+FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of
+1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov
+L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo
+dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr
+aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq
+hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L
+6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG
+HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6
+0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB
+lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi
+o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1
+gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v
+faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63
+Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh
+jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw
+3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI
+# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI
+# Label: "emSign Root CA - G1"
+# Serial: 235931866688319308814040
+# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac
+# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c
+# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67
+-----BEGIN CERTIFICATE-----
+MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD
+VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU
+ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH
+MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO
+MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv
+Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN
+BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz
+f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO
+8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq
+d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM
+tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt
+Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB
+o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD
+AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x
+PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM
+wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d
+GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH
+6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby
+RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx
+iN66zB+Afko=
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI
+# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI
+# Label: "emSign ECC Root CA - G3"
+# Serial: 287880440101571086945156
+# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40
+# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1
+# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b
+-----BEGIN CERTIFICATE-----
+MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG
+EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo
+bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g
+RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ
+TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s
+b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw
+djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0
+WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS
+fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB
+zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq
+hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB
+CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD
++JbNR6iC8hZVdyR+EhCVBCyj
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI
+# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI
+# Label: "emSign Root CA - C1"
+# Serial: 825510296613316004955058
+# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68
+# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01
+# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f
+-----BEGIN CERTIFICATE-----
+MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG
+A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg
+SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw
+MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln
+biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v
+dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ
+BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ
+HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH
+3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH
+GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c
+xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1
+aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq
+TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL
+BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87
+/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4
+kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG
+YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT
++xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo
+WXzhriKi4gp6D/piq1JM4fHfyr6DDUI=
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI
+# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI
+# Label: "emSign ECC Root CA - C3"
+# Serial: 582948710642506000014504
+# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5
+# SHA1 Fingerprint: b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66
+# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3
+-----BEGIN CERTIFICATE-----
+MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG
+EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx
+IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw
+MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln
+biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND
+IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci
+MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti
+sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O
+BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB
+Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c
+3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J
+0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post
+# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post
+# Label: "Hongkong Post Root CA 3"
+# Serial: 46170865288971385588281144162979347873371282084
+# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0
+# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02
+# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6
+-----BEGIN CERTIFICATE-----
+MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL
+BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ
+SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n
+a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5
+NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT
+CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u
+Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO
+dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI
+VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV
+9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY
+2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY
+vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt
+bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb
+x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+
+l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK
+TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj
+Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP
+BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e
+i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw
+DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG
+7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk
+MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr
+gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk
+GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS
+3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm
+Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+
+l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c
+JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP
+L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa
+LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG
+mpv0
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - G4"
+# Serial: 289383649854506086828220374796556676440
+# MD5 Fingerprint: 89:53:f1:83:23:b7:7c:8e:05:f1:8c:71:38:4e:1f:88
+# SHA1 Fingerprint: 14:88:4e:86:26:37:b0:26:af:59:62:5c:40:77:ec:35:29:ba:96:01
+# SHA256 Fingerprint: db:35:17:d1:f6:73:2a:2d:5a:b9:7c:53:3e:c7:07:79:ee:32:70:a6:2f:b4:ac:42:38:37:24:60:e6:f0:1e:88
+-----BEGIN CERTIFICATE-----
+MIIGSzCCBDOgAwIBAgIRANm1Q3+vqTkPAAAAAFVlrVgwDQYJKoZIhvcNAQELBQAw
+gb4xCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQL
+Ex9TZWUgd3d3LmVudHJ1c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykg
+MjAxNSBFbnRydXN0LCBJbmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAw
+BgNVBAMTKUVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0
+MB4XDTE1MDUyNzExMTExNloXDTM3MTIyNzExNDExNlowgb4xCzAJBgNVBAYTAlVT
+MRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1
+c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxNSBFbnRydXN0LCBJ
+bmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAwBgNVBAMTKUVudHJ1c3Qg
+Um9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0MIICIjANBgkqhkiG9w0B
+AQEFAAOCAg8AMIICCgKCAgEAsewsQu7i0TD/pZJH4i3DumSXbcr3DbVZwbPLqGgZ
+2K+EbTBwXX7zLtJTmeH+H17ZSK9dE43b/2MzTdMAArzE+NEGCJR5WIoV3imz/f3E
+T+iq4qA7ec2/a0My3dl0ELn39GjUu9CH1apLiipvKgS1sqbHoHrmSKvS0VnM1n4j
+5pds8ELl3FFLFUHtSUrJ3hCX1nbB76W1NhSXNdh4IjVS70O92yfbYVaCNNzLiGAM
+C1rlLAHGVK/XqsEQe9IFWrhAnoanw5CGAlZSCXqc0ieCU0plUmr1POeo8pyvi73T
+DtTUXm6Hnmo9RR3RXRv06QqsYJn7ibT/mCzPfB3pAqoEmh643IhuJbNsZvc8kPNX
+wbMv9W3y+8qh+CmdRouzavbmZwe+LGcKKh9asj5XxNMhIWNlUpEbsZmOeX7m640A
+2Vqq6nPopIICR5b+W45UYaPrL0swsIsjdXJ8ITzI9vF01Bx7owVV7rtNOzK+mndm
+nqxpkCIHH2E6lr7lmk/MBTwoWdPBDFSoWWG9yHJM6Nyfh3+9nEg2XpWjDrk4JFX8
+dWbrAuMINClKxuMrLzOg2qOGpRKX/YAr2hRC45K9PvJdXmd0LhyIRyk0X+IyqJwl
+N4y6mACXi0mWHv0liqzc2thddG5msP9E36EYxr5ILzeUePiVSj9/E15dWf10hkNj
+c0kCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD
+VR0OBBYEFJ84xFYjwznooHFs6FRM5Og6sb9nMA0GCSqGSIb3DQEBCwUAA4ICAQAS
+5UKme4sPDORGpbZgQIeMJX6tuGguW8ZAdjwD+MlZ9POrYs4QjbRaZIxowLByQzTS
+Gwv2LFPSypBLhmb8qoMi9IsabyZIrHZ3CL/FmFz0Jomee8O5ZDIBf9PD3Vht7LGr
+hFV0d4QEJ1JrhkzO3bll/9bGXp+aEJlLdWr+aumXIOTkdnrG0CSqkM0gkLpHZPt/
+B7NTeLUKYvJzQ85BK4FqLoUWlFPUa19yIqtRLULVAJyZv967lDtX/Zr1hstWO1uI
+AeV8KEsD+UmDfLJ/fOPtjqF/YFOOVZ1QNBIPt5d7bIdKROf1beyAN/BYGW5KaHbw
+H5Lk6rWS02FREAutp9lfx1/cH6NcjKF+m7ee01ZvZl4HliDtC3T7Zk6LERXpgUl+
+b7DUUH8i119lAg2m9IUe2K4GS0qn0jFmwvjO5QimpAKWRGhXxNUzzxkvFMSUHHuk
+2fCfDrGA4tGeEWSpiBE6doLlYsKA2KSD7ZPvfC+QsDJMlhVoSFLUmQjAJOgc47Ol
+IQ6SwJAfzyBfyjs4x7dtOvPmRLgOMWuIjnDrnBdSqEGULoe256YSxXXfW8AKbnuk
+5F6G+TaU33fD6Q3AOfF5u0aOq0NZJ7cguyPpVkAh7DE9ZapD8j3fcEThuk0mEDuY
+n/PIjhs4ViFqUZPTkcpG2om3PVODLAgfi49T3f+sHw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation
+# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation
+# Label: "Microsoft ECC Root Certificate Authority 2017"
+# Serial: 136839042543790627607696632466672567020
+# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67
+# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5
+# SHA256 Fingerprint: 35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02
+-----BEGIN CERTIFICATE-----
+MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw
+CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD
+VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw
+MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV
+UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy
+b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq
+hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR
+ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb
+hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E
+BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3
+FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV
+L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB
+iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation
+# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation
+# Label: "Microsoft RSA Root Certificate Authority 2017"
+# Serial: 40975477897264996090493496164228220339
+# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47
+# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74
+# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0
+-----BEGIN CERTIFICATE-----
+MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl
+MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw
+NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5
+IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG
+EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N
+aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ
+Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0
+ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1
+HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm
+gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ
+jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc
+aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG
+YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6
+W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K
+UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH
++FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q
+W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/
+BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC
+NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC
+LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC
+gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6
+tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh
+SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2
+TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3
+pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR
+xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp
+GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9
+dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN
+AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB
+RA+GsCyRxj3qrg+E
+-----END CERTIFICATE-----
+
+# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd.
+# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd.
+# Label: "e-Szigno Root CA 2017"
+# Serial: 411379200276854331539784714
+# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98
+# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1
+# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99
+-----BEGIN CERTIFICATE-----
+MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV
+BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk
+LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv
+b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ
+BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg
+THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v
+IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv
+xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H
+Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G
+A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB
+eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo
+jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ
++efcMQ==
+-----END CERTIFICATE-----
+
+# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2
+# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2
+# Label: "certSIGN Root CA G2"
+# Serial: 313609486401300475190
+# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7
+# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32
+# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05
+-----BEGIN CERTIFICATE-----
+MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV
+BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g
+Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ
+BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ
+R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF
+dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw
+vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ
+uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp
+n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs
+cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW
+xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P
+rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF
+DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx
+DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy
+LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C
+eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB
+/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ
+d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq
+kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC
+b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl
+qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0
+OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c
+NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk
+ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO
+pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj
+03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk
+PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE
+1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX
+QRBdJ3NghVdJIgc=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc.
+# Subject: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc.
+# Label: "Trustwave Global Certification Authority"
+# Serial: 1846098327275375458322922162
+# MD5 Fingerprint: f8:1c:18:2d:2f:ba:5f:6d:a1:6c:bc:c7:ab:91:c7:0e
+# SHA1 Fingerprint: 2f:8f:36:4f:e1:58:97:44:21:59:87:a5:2a:9a:d0:69:95:26:7f:b5
+# SHA256 Fingerprint: 97:55:20:15:f5:dd:fc:3c:87:88:c0:06:94:45:55:40:88:94:45:00:84:f1:00:86:70:86:bc:1a:2b:b5:8d:c8
+-----BEGIN CERTIFICATE-----
+MIIF2jCCA8KgAwIBAgIMBfcOhtpJ80Y1LrqyMA0GCSqGSIb3DQEBCwUAMIGIMQsw
+CQYDVQQGEwJVUzERMA8GA1UECAwISWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28x
+ITAfBgNVBAoMGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1
+c3R3YXZlIEdsb2JhbCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xNzA4MjMx
+OTM0MTJaFw00MjA4MjMxOTM0MTJaMIGIMQswCQYDVQQGEwJVUzERMA8GA1UECAwI
+SWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28xITAfBgNVBAoMGFRydXN0d2F2ZSBI
+b2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1c3R3YXZlIEdsb2JhbCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB
+ALldUShLPDeS0YLOvR29zd24q88KPuFd5dyqCblXAj7mY2Hf8g+CY66j96xz0Xzn
+swuvCAAJWX/NKSqIk4cXGIDtiLK0thAfLdZfVaITXdHG6wZWiYj+rDKd/VzDBcdu
+7oaJuogDnXIhhpCujwOl3J+IKMujkkkP7NAP4m1ET4BqstTnoApTAbqOl5F2brz8
+1Ws25kCI1nsvXwXoLG0R8+eyvpJETNKXpP7ScoFDB5zpET71ixpZfR9oWN0EACyW
+80OzfpgZdNmcc9kYvkHHNHnZ9GLCQ7mzJ7Aiy/k9UscwR7PJPrhq4ufogXBeQotP
+JqX+OsIgbrv4Fo7NDKm0G2x2EOFYeUY+VM6AqFcJNykbmROPDMjWLBz7BegIlT1l
+RtzuzWniTY+HKE40Cz7PFNm73bZQmq131BnW2hqIyE4bJ3XYsgjxroMwuREOzYfw
+hI0Vcnyh78zyiGG69Gm7DIwLdVcEuE4qFC49DxweMqZiNu5m4iK4BUBjECLzMx10
+coos9TkpoNPnG4CELcU9402x/RpvumUHO1jsQkUm+9jaJXLE9gCxInm943xZYkqc
+BW89zubWR2OZxiRvchLIrH+QtAuRcOi35hYQcRfO3gZPSEF9NUqjifLJS3tBEW1n
+twiYTOURGa5CgNz7kAXU+FDKvuStx8KU1xad5hePrzb7AgMBAAGjQjBAMA8GA1Ud
+EwEB/wQFMAMBAf8wHQYDVR0OBBYEFJngGWcNYtt2s9o9uFvo/ULSMQ6HMA4GA1Ud
+DwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAmHNw4rDT7TnsTGDZqRKGFx6W
+0OhUKDtkLSGm+J1WE2pIPU/HPinbbViDVD2HfSMF1OQc3Og4ZYbFdada2zUFvXfe
+uyk3QAUHw5RSn8pk3fEbK9xGChACMf1KaA0HZJDmHvUqoai7PF35owgLEQzxPy0Q
+lG/+4jSHg9bP5Rs1bdID4bANqKCqRieCNqcVtgimQlRXtpla4gt5kNdXElE1GYhB
+aCXUNxeEFfsBctyV3lImIJgm4nb1J2/6ADtKYdkNy1GTKv0WBpanI5ojSP5RvbbE
+sLFUzt5sQa0WZ37b/TjNuThOssFgy50X31ieemKyJo90lZvkWx3SD92YHJtZuSPT
+MaCm/zjdzyBP6VhWOmfD0faZmZ26NraAL4hHT4a/RDqA5Dccprrql5gR0IRiR2Qe
+qu5AvzSxnI9O4fKSTx+O856X3vOmeWqJcU9LJxdI/uz0UA9PSX3MReO9ekDFQdxh
+VicGaeVyQYHTtgGJoC86cnn+OjC/QezHYj6RS8fZMXZC+fc8Y+wmjHMMfRod6qh8
+h6jCJ3zhM0EPz8/8AKAigJ5Kp28AsEFFtyLKaEjFQqKu3R3y4G5OBVixwJAWKqQ9
+EEC+j2Jjg6mcgn0tAumDMHzLJ8n9HmYAsC7TIS+OMxZsmO0QqAfWzJPP29FpHOTK
+yeC2nOnOcXHebD8WpHk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc.
+# Subject: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc.
+# Label: "Trustwave Global ECC P256 Certification Authority"
+# Serial: 4151900041497450638097112925
+# MD5 Fingerprint: 5b:44:e3:8d:5d:36:86:26:e8:0d:05:d2:59:a7:83:54
+# SHA1 Fingerprint: b4:90:82:dd:45:0c:be:8b:5b:b1:66:d3:e2:a4:08:26:cd:ed:42:cf
+# SHA256 Fingerprint: 94:5b:bc:82:5e:a5:54:f4:89:d1:fd:51:a7:3d:df:2e:a6:24:ac:70:19:a0:52:05:22:5c:22:a7:8c:cf:a8:b4
+-----BEGIN CERTIFICATE-----
+MIICYDCCAgegAwIBAgIMDWpfCD8oXD5Rld9dMAoGCCqGSM49BAMCMIGRMQswCQYD
+VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf
+BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3
+YXZlIEdsb2JhbCBFQ0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x
+NzA4MjMxOTM1MTBaFw00MjA4MjMxOTM1MTBaMIGRMQswCQYDVQQGEwJVUzERMA8G
+A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0
+d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF
+Q0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTBZMBMGByqGSM49AgEGCCqG
+SM49AwEHA0IABH77bOYj43MyCMpg5lOcunSNGLB4kFKA3TjASh3RqMyTpJcGOMoN
+FWLGjgEqZZ2q3zSRLoHB5DOSMcT9CTqmP62jQzBBMA8GA1UdEwEB/wQFMAMBAf8w
+DwYDVR0PAQH/BAUDAwcGADAdBgNVHQ4EFgQUo0EGrJBt0UrrdaVKEJmzsaGLSvcw
+CgYIKoZIzj0EAwIDRwAwRAIgB+ZU2g6gWrKuEZ+Hxbb/ad4lvvigtwjzRM4q3wgh
+DDcCIC0mA6AFvWvR9lz4ZcyGbbOcNEhjhAnFjXca4syc4XR7
+-----END CERTIFICATE-----
+
+# Issuer: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc.
+# Subject: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc.
+# Label: "Trustwave Global ECC P384 Certification Authority"
+# Serial: 2704997926503831671788816187
+# MD5 Fingerprint: ea:cf:60:c4:3b:b9:15:29:40:a1:97:ed:78:27:93:d6
+# SHA1 Fingerprint: e7:f3:a3:c8:cf:6f:c3:04:2e:6d:0e:67:32:c5:9e:68:95:0d:5e:d2
+# SHA256 Fingerprint: 55:90:38:59:c8:c0:c3:eb:b8:75:9e:ce:4e:25:57:22:5f:f5:75:8b:bd:38:eb:d4:82:76:60:1e:1b:d5:80:97
+-----BEGIN CERTIFICATE-----
+MIICnTCCAiSgAwIBAgIMCL2Fl2yZJ6SAaEc7MAoGCCqGSM49BAMDMIGRMQswCQYD
+VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf
+BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3
+YXZlIEdsb2JhbCBFQ0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x
+NzA4MjMxOTM2NDNaFw00MjA4MjMxOTM2NDNaMIGRMQswCQYDVQQGEwJVUzERMA8G
+A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0
+d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF
+Q0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTB2MBAGByqGSM49AgEGBSuB
+BAAiA2IABGvaDXU1CDFHBa5FmVXxERMuSvgQMSOjfoPTfygIOiYaOs+Xgh+AtycJ
+j9GOMMQKmw6sWASr9zZ9lCOkmwqKi6vr/TklZvFe/oyujUF5nQlgziip04pt89ZF
+1PKYhDhloKNDMEEwDwYDVR0TAQH/BAUwAwEB/zAPBgNVHQ8BAf8EBQMDBwYAMB0G
+A1UdDgQWBBRVqYSJ0sEyvRjLbKYHTsjnnb6CkDAKBggqhkjOPQQDAwNnADBkAjA3
+AZKXRRJ+oPM+rRk6ct30UJMDEr5E0k9BpIycnR+j9sKS50gU/k6bpZFXrsY3crsC
+MGclCrEMXu6pY5Jv5ZAL/mYiykf9ijH3g/56vxC+GCsej/YpHpRZ744hN8tRmKVu
+Sw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp.
+# Subject: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp.
+# Label: "NAVER Global Root Certification Authority"
+# Serial: 9013692873798656336226253319739695165984492813
+# MD5 Fingerprint: c8:7e:41:f6:25:3b:f5:09:b3:17:e8:46:3d:bf:d0:9b
+# SHA1 Fingerprint: 8f:6b:f2:a9:27:4a:da:14:a0:c4:f4:8e:61:27:f9:c0:1e:78:5d:d1
+# SHA256 Fingerprint: 88:f4:38:dc:f8:ff:d1:fa:8f:42:91:15:ff:e5:f8:2a:e1:e0:6e:0c:70:c3:75:fa:ad:71:7b:34:a4:9e:72:65
+-----BEGIN CERTIFICATE-----
+MIIFojCCA4qgAwIBAgIUAZQwHqIL3fXFMyqxQ0Rx+NZQTQ0wDQYJKoZIhvcNAQEM
+BQAwaTELMAkGA1UEBhMCS1IxJjAkBgNVBAoMHU5BVkVSIEJVU0lORVNTIFBMQVRG
+T1JNIENvcnAuMTIwMAYDVQQDDClOQVZFUiBHbG9iYWwgUm9vdCBDZXJ0aWZpY2F0
+aW9uIEF1dGhvcml0eTAeFw0xNzA4MTgwODU4NDJaFw0zNzA4MTgyMzU5NTlaMGkx
+CzAJBgNVBAYTAktSMSYwJAYDVQQKDB1OQVZFUiBCVVNJTkVTUyBQTEFURk9STSBD
+b3JwLjEyMDAGA1UEAwwpTkFWRVIgR2xvYmFsIFJvb3QgQ2VydGlmaWNhdGlvbiBB
+dXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC21PGTXLVA
+iQqrDZBbUGOukJR0F0Vy1ntlWilLp1agS7gvQnXp2XskWjFlqxcX0TM62RHcQDaH
+38dq6SZeWYp34+hInDEW+j6RscrJo+KfziFTowI2MMtSAuXaMl3Dxeb57hHHi8lE
+HoSTGEq0n+USZGnQJoViAbbJAh2+g1G7XNr4rRVqmfeSVPc0W+m/6imBEtRTkZaz
+kVrd/pBzKPswRrXKCAfHcXLJZtM0l/aM9BhK4dA9WkW2aacp+yPOiNgSnABIqKYP
+szuSjXEOdMWLyEz59JuOuDxp7W87UC9Y7cSw0BwbagzivESq2M0UXZR4Yb8Obtoq
+vC8MC3GmsxY/nOb5zJ9TNeIDoKAYv7vxvvTWjIcNQvcGufFt7QSUqP620wbGQGHf
+nZ3zVHbOUzoBppJB7ASjjw2i1QnK1sua8e9DXcCrpUHPXFNwcMmIpi3Ua2FzUCaG
+YQ5fG8Ir4ozVu53BA0K6lNpfqbDKzE0K70dpAy8i+/Eozr9dUGWokG2zdLAIx6yo
+0es+nPxdGoMuK8u180SdOqcXYZaicdNwlhVNt0xz7hlcxVs+Qf6sdWA7G2POAN3a
+CJBitOUt7kinaxeZVL6HSuOpXgRM6xBtVNbv8ejyYhbLgGvtPe31HzClrkvJE+2K
+AQHJuFFYwGY6sWZLxNUxAmLpdIQM201GLQIDAQABo0IwQDAdBgNVHQ4EFgQU0p+I
+36HNLL3s9TsBAZMzJ7LrYEswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMB
+Af8wDQYJKoZIhvcNAQEMBQADggIBADLKgLOdPVQG3dLSLvCkASELZ0jKbY7gyKoN
+qo0hV4/GPnrK21HUUrPUloSlWGB/5QuOH/XcChWB5Tu2tyIvCZwTFrFsDDUIbatj
+cu3cvuzHV+YwIHHW1xDBE1UBjCpD5EHxzzp6U5LOogMFDTjfArsQLtk70pt6wKGm
++LUx5vR1yblTmXVHIloUFcd4G7ad6Qz4G3bxhYTeodoS76TiEJd6eN4MUZeoIUCL
+hr0N8F5OSza7OyAfikJW4Qsav3vQIkMsRIz75Sq0bBwcupTgE34h5prCy8VCZLQe
+lHsIJchxzIdFV4XTnyliIoNRlwAYl3dqmJLJfGBs32x9SuRwTMKeuB330DTHD8z7
+p/8Dvq1wkNoL3chtl1+afwkyQf3NosxabUzyqkn+Zvjp2DXrDige7kgvOtB5CTh8
+piKCk5XQA76+AqAF3SAi428diDRgxuYKuQl1C/AH6GmWNcf7I4GOODm4RStDeKLR
+LBT/DShycpWbXgnbiUSYqqFJu3FS8r/2/yehNq+4tneI3TqkbZs0kNwUXTC/t+sX
+5Ie3cdCh13cV1ELX8vMxmV2b3RZtP+oGI/hGoiLtk/bdmuYqh7GYVPEi92tF4+KO
+dh2ajcQGjTa3FPOdVGm3jjzVpG2Tgbet9r1ke8LJaDmgkpzNNIaRkPpkUZ3+/uul
+9XXeifdy
+-----END CERTIFICATE-----
+
+# Issuer: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres
+# Subject: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres
+# Label: "AC RAIZ FNMT-RCM SERVIDORES SEGUROS"
+# Serial: 131542671362353147877283741781055151509
+# MD5 Fingerprint: 19:36:9c:52:03:2f:d2:d1:bb:23:cc:dd:1e:12:55:bb
+# SHA1 Fingerprint: 62:ff:d9:9e:c0:65:0d:03:ce:75:93:d2:ed:3f:2d:32:c9:e3:e5:4a
+# SHA256 Fingerprint: 55:41:53:b1:3d:2c:f9:dd:b7:53:bf:be:1a:4e:0a:e0:8d:0a:a4:18:70:58:fe:60:a2:b8:62:b2:e4:b8:7b:cb
+-----BEGIN CERTIFICATE-----
+MIICbjCCAfOgAwIBAgIQYvYybOXE42hcG2LdnC6dlTAKBggqhkjOPQQDAzB4MQsw
+CQYDVQQGEwJFUzERMA8GA1UECgwIRk5NVC1SQ00xDjAMBgNVBAsMBUNlcmVzMRgw
+FgYDVQRhDA9WQVRFUy1RMjgyNjAwNEoxLDAqBgNVBAMMI0FDIFJBSVogRk5NVC1S
+Q00gU0VSVklET1JFUyBTRUdVUk9TMB4XDTE4MTIyMDA5MzczM1oXDTQzMTIyMDA5
+MzczM1oweDELMAkGA1UEBhMCRVMxETAPBgNVBAoMCEZOTVQtUkNNMQ4wDAYDVQQL
+DAVDZXJlczEYMBYGA1UEYQwPVkFURVMtUTI4MjYwMDRKMSwwKgYDVQQDDCNBQyBS
+QUlaIEZOTVQtUkNNIFNFUlZJRE9SRVMgU0VHVVJPUzB2MBAGByqGSM49AgEGBSuB
+BAAiA2IABPa6V1PIyqvfNkpSIeSX0oNnnvBlUdBeh8dHsVnyV0ebAAKTRBdp20LH
+sbI6GA60XYyzZl2hNPk2LEnb80b8s0RpRBNm/dfF/a82Tc4DTQdxz69qBdKiQ1oK
+Um8BA06Oi6NCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD
+VR0OBBYEFAG5L++/EYZg8k/QQW6rcx/n0m5JMAoGCCqGSM49BAMDA2kAMGYCMQCu
+SuMrQMN0EfKVrRYj3k4MGuZdpSRea0R7/DjiT8ucRRcRTBQnJlU5dUoDzBOQn5IC
+MQD6SmxgiHPz7riYYqnOK8LZiqZwMR2vsJRM60/G49HzYqc8/5MuB1xJAWdpEgJy
+v+c=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign Root R46 O=GlobalSign nv-sa
+# Subject: CN=GlobalSign Root R46 O=GlobalSign nv-sa
+# Label: "GlobalSign Root R46"
+# Serial: 1552617688466950547958867513931858518042577
+# MD5 Fingerprint: c4:14:30:e4:fa:66:43:94:2a:6a:1b:24:5f:19:d0:ef
+# SHA1 Fingerprint: 53:a2:b0:4b:ca:6b:d6:45:e6:39:8a:8e:c4:0d:d2:bf:77:c3:a2:90
+# SHA256 Fingerprint: 4f:a3:12:6d:8d:3a:11:d1:c4:85:5a:4f:80:7c:ba:d6:cf:91:9d:3a:5a:88:b0:3b:ea:2c:63:72:d9:3c:40:c9
+-----BEGIN CERTIFICATE-----
+MIIFWjCCA0KgAwIBAgISEdK7udcjGJ5AXwqdLdDfJWfRMA0GCSqGSIb3DQEBDAUA
+MEYxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYD
+VQQDExNHbG9iYWxTaWduIFJvb3QgUjQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMy
+MDAwMDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYt
+c2ExHDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQCsrHQy6LNl5brtQyYdpokNRbopiLKkHWPd08EsCVeJ
+OaFV6Wc0dwxu5FUdUiXSE2te4R2pt32JMl8Nnp8semNgQB+msLZ4j5lUlghYruQG
+vGIFAha/r6gjA7aUD7xubMLL1aa7DOn2wQL7Id5m3RerdELv8HQvJfTqa1VbkNud
+316HCkD7rRlr+/fKYIje2sGP1q7Vf9Q8g+7XFkyDRTNrJ9CG0Bwta/OrffGFqfUo
+0q3v84RLHIf8E6M6cqJaESvWJ3En7YEtbWaBkoe0G1h6zD8K+kZPTXhc+CtI4wSE
+y132tGqzZfxCnlEmIyDLPRT5ge1lFgBPGmSXZgjPjHvjK8Cd+RTyG/FWaha/LIWF
+zXg4mutCagI0GIMXTpRW+LaCtfOW3T3zvn8gdz57GSNrLNRyc0NXfeD412lPFzYE
++cCQYDdF3uYM2HSNrpyibXRdQr4G9dlkbgIQrImwTDsHTUB+JMWKmIJ5jqSngiCN
+I/onccnfxkF0oE32kRbcRoxfKWMxWXEM2G/CtjJ9++ZdU6Z+Ffy7dXxd7Pj2Fxzs
+x2sZy/N78CsHpdlseVR2bJ0cpm4O6XkMqCNqo98bMDGfsVR7/mrLZqrcZdCinkqa
+ByFrgY/bxFn63iLABJzjqls2k+g9vXqhnQt2sQvHnf3PmKgGwvgqo6GDoLclcqUC
+4wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
+HQ4EFgQUA1yrc4GHqMywptWU4jaWSf8FmSwwDQYJKoZIhvcNAQEMBQADggIBAHx4
+7PYCLLtbfpIrXTncvtgdokIzTfnvpCo7RGkerNlFo048p9gkUbJUHJNOxO97k4Vg
+JuoJSOD1u8fpaNK7ajFxzHmuEajwmf3lH7wvqMxX63bEIaZHU1VNaL8FpO7XJqti
+2kM3S+LGteWygxk6x9PbTZ4IevPuzz5i+6zoYMzRx6Fcg0XERczzF2sUyQQCPtIk
+pnnpHs6i58FZFZ8d4kuaPp92CC1r2LpXFNqD6v6MVenQTqnMdzGxRBF6XLE+0xRF
+FRhiJBPSy03OXIPBNvIQtQ6IbbjhVp+J3pZmOUdkLG5NrmJ7v2B0GbhWrJKsFjLt
+rWhV/pi60zTe9Mlhww6G9kuEYO4Ne7UyWHmRVSyBQ7N0H3qqJZ4d16GLuc1CLgSk
+ZoNNiTW2bKg2SnkheCLQQrzRQDGQob4Ez8pn7fXwgNNgyYMqIgXQBztSvwyeqiv5
+u+YfjyW6hY0XHgL+XVAEV8/+LbzvXMAaq7afJMbfc2hIkCwU9D9SGuTSyxTDYWnP
+4vkYxboznxSjBF25cfe1lNj2M8FawTSLfJvdkzrnE6JwYZ+vj+vYxXX4M2bUdGc6
+N3ec592kD3ZDZopD8p/7DEJ4Y9HiD2971KE9dJeFt0g5QdYg/NA6s/rob8SKunE3
+vouXsXgxT7PntgMTzlSdriVZzH81Xwj3QEUxeCp6
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign Root E46 O=GlobalSign nv-sa
+# Subject: CN=GlobalSign Root E46 O=GlobalSign nv-sa
+# Label: "GlobalSign Root E46"
+# Serial: 1552617690338932563915843282459653771421763
+# MD5 Fingerprint: b5:b8:66:ed:de:08:83:e3:c9:e2:01:34:06:ac:51:6f
+# SHA1 Fingerprint: 39:b4:6c:d5:fe:80:06:eb:e2:2f:4a:bb:08:33:a0:af:db:b9:dd:84
+# SHA256 Fingerprint: cb:b9:c4:4d:84:b8:04:3e:10:50:ea:31:a6:9f:51:49:55:d7:bf:d2:e2:c6:b4:93:01:01:9a:d6:1d:9f:50:58
+-----BEGIN CERTIFICATE-----
+MIICCzCCAZGgAwIBAgISEdK7ujNu1LzmJGjFDYQdmOhDMAoGCCqGSM49BAMDMEYx
+CzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYDVQQD
+ExNHbG9iYWxTaWduIFJvb3QgRTQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMyMDAw
+MDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2Ex
+HDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA
+IgNiAAScDrHPt+ieUnd1NPqlRqetMhkytAepJ8qUuwzSChDH2omwlwxwEwkBjtjq
+R+q+soArzfwoDdusvKSGN+1wCAB16pMLey5SnCNoIwZD7JIvU4Tb+0cUB+hflGdd
+yXqBPCCjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud
+DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ
+7Zvvi5QCkxeCmb6zniz2C5GMn0oUsfZkvLtoURMMA/cVi4RguYv/Uo7njLwcAjA8
++RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH
+# Subject: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH
+# Label: "GLOBALTRUST 2020"
+# Serial: 109160994242082918454945253
+# MD5 Fingerprint: 8a:c7:6f:cb:6d:e3:cc:a2:f1:7c:83:fa:0e:78:d7:e8
+# SHA1 Fingerprint: d0:67:c1:13:51:01:0c:aa:d0:c7:6a:65:37:31:16:26:4f:53:71:a2
+# SHA256 Fingerprint: 9a:29:6a:51:82:d1:d4:51:a2:e3:7f:43:9b:74:da:af:a2:67:52:33:29:f9:0f:9a:0d:20:07:c3:34:e2:3c:9a
+-----BEGIN CERTIFICATE-----
+MIIFgjCCA2qgAwIBAgILWku9WvtPilv6ZeUwDQYJKoZIhvcNAQELBQAwTTELMAkG
+A1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9uaXRvcmluZyBHbWJIMRkw
+FwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMB4XDTIwMDIxMDAwMDAwMFoXDTQwMDYx
+MDAwMDAwMFowTTELMAkGA1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9u
+aXRvcmluZyBHbWJIMRkwFwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMIICIjANBgkq
+hkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAri5WrRsc7/aVj6B3GyvTY4+ETUWiD59b
+RatZe1E0+eyLinjF3WuvvcTfk0Uev5E4C64OFudBc/jbu9G4UeDLgztzOG53ig9Z
+YybNpyrOVPu44sB8R85gfD+yc/LAGbaKkoc1DZAoouQVBGM+uq/ufF7MpotQsjj3
+QWPKzv9pj2gOlTblzLmMCcpL3TGQlsjMH/1WljTbjhzqLL6FLmPdqqmV0/0plRPw
+yJiT2S0WR5ARg6I6IqIoV6Lr/sCMKKCmfecqQjuCgGOlYx8ZzHyyZqjC0203b+J+
+BlHZRYQfEs4kUmSFC0iAToexIiIwquuuvuAC4EDosEKAA1GqtH6qRNdDYfOiaxaJ
+SaSjpCuKAsR49GiKweR6NrFvG5Ybd0mN1MkGco/PU+PcF4UgStyYJ9ORJitHHmkH
+r96i5OTUawuzXnzUJIBHKWk7buis/UDr2O1xcSvy6Fgd60GXIsUf1DnQJ4+H4xj0
+4KlGDfV0OoIu0G4skaMxXDtG6nsEEFZegB31pWXogvziB4xiRfUg3kZwhqG8k9Me
+dKZssCz3AwyIDMvUclOGvGBG85hqwvG/Q/lwIHfKN0F5VVJjjVsSn8VoxIidrPIw
+q7ejMZdnrY8XD2zHc+0klGvIg5rQmjdJBKuxFshsSUktq6HQjJLyQUp5ISXbY9e2
+nKd+Qmn7OmMCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+AQYwHQYDVR0OBBYEFNwuH9FhN3nkq9XVsxJxaD1qaJwiMB8GA1UdIwQYMBaAFNwu
+H9FhN3nkq9XVsxJxaD1qaJwiMA0GCSqGSIb3DQEBCwUAA4ICAQCR8EICaEDuw2jA
+VC/f7GLDw56KoDEoqoOOpFaWEhCGVrqXctJUMHytGdUdaG/7FELYjQ7ztdGl4wJC
+XtzoRlgHNQIw4Lx0SsFDKv/bGtCwr2zD/cuz9X9tAy5ZVp0tLTWMstZDFyySCstd
+6IwPS3BD0IL/qMy/pJTAvoe9iuOTe8aPmxadJ2W8esVCgmxcB9CpwYhgROmYhRZf
++I/KARDOJcP5YBugxZfD0yyIMaK9MOzQ0MAS8cE54+X1+NZK3TTN+2/BT+MAi1bi
+kvcoskJ3ciNnxz8RFbLEAwW+uxF7Cr+obuf/WEPPm2eggAe2HcqtbepBEX4tdJP7
+wry+UUTF72glJ4DjyKDUEuzZpTcdN3y0kcra1LGWge9oXHYQSa9+pTeAsRxSvTOB
+TI/53WXZFM2KJVj04sWDpQmQ1GwUY7VA3+vA/MRYfg0UFodUJ25W5HCEuGwyEn6C
+MUO+1918oa2u1qsgEu8KwxCMSZY13At1XrFP1U80DhEgB3VDRemjEdqso5nCtnkn
+4rnvyOL2NSl6dPrFf4IFYqYK6miyeUcGbvJXqBUzxvd4Sj1Ce2t+/vdG6tHrju+I
+aFvowdlxfv1k7/9nR4hYJS8+hge9+6jlgqispdNpQ80xiEmEU5LAsTkbOYMBMMTy
+qfrQA71yN2BWHzZ8vTmR9W0Nv3vXkg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz
+# Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz
+# Label: "ANF Secure Server Root CA"
+# Serial: 996390341000653745
+# MD5 Fingerprint: 26:a6:44:5a:d9:af:4e:2f:b2:1d:b6:65:b0:4e:e8:96
+# SHA1 Fingerprint: 5b:6e:68:d0:cc:15:b6:a0:5f:1e:c1:5f:ae:02:fc:6b:2f:5d:6f:74
+# SHA256 Fingerprint: fb:8f:ec:75:91:69:b9:10:6b:1e:51:16:44:c6:18:c5:13:04:37:3f:6c:06:43:08:8d:8b:ef:fd:1b:99:75:99
+-----BEGIN CERTIFICATE-----
+MIIF7zCCA9egAwIBAgIIDdPjvGz5a7EwDQYJKoZIhvcNAQELBQAwgYQxEjAQBgNV
+BAUTCUc2MzI4NzUxMDELMAkGA1UEBhMCRVMxJzAlBgNVBAoTHkFORiBBdXRvcmlk
+YWQgZGUgQ2VydGlmaWNhY2lvbjEUMBIGA1UECxMLQU5GIENBIFJhaXoxIjAgBgNV
+BAMTGUFORiBTZWN1cmUgU2VydmVyIFJvb3QgQ0EwHhcNMTkwOTA0MTAwMDM4WhcN
+MzkwODMwMTAwMDM4WjCBhDESMBAGA1UEBRMJRzYzMjg3NTEwMQswCQYDVQQGEwJF
+UzEnMCUGA1UEChMeQU5GIEF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uMRQwEgYD
+VQQLEwtBTkYgQ0EgUmFpejEiMCAGA1UEAxMZQU5GIFNlY3VyZSBTZXJ2ZXIgUm9v
+dCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANvrayvmZFSVgpCj
+cqQZAZ2cC4Ffc0m6p6zzBE57lgvsEeBbphzOG9INgxwruJ4dfkUyYA8H6XdYfp9q
+yGFOtibBTI3/TO80sh9l2Ll49a2pcbnvT1gdpd50IJeh7WhM3pIXS7yr/2WanvtH
+2Vdy8wmhrnZEE26cLUQ5vPnHO6RYPUG9tMJJo8gN0pcvB2VSAKduyK9o7PQUlrZX
+H1bDOZ8rbeTzPvY1ZNoMHKGESy9LS+IsJJ1tk0DrtSOOMspvRdOoiXsezx76W0OL
+zc2oD2rKDF65nkeP8Nm2CgtYZRczuSPkdxl9y0oukntPLxB3sY0vaJxizOBQ+OyR
+p1RMVwnVdmPF6GUe7m1qzwmd+nxPrWAI/VaZDxUse6mAq4xhj0oHdkLePfTdsiQz
+W7i1o0TJrH93PB0j7IKppuLIBkwC/qxcmZkLLxCKpvR/1Yd0DVlJRfbwcVw5Kda/
+SiOL9V8BY9KHcyi1Swr1+KuCLH5zJTIdC2MKF4EA/7Z2Xue0sUDKIbvVgFHlSFJn
+LNJhiQcND85Cd8BEc5xEUKDbEAotlRyBr+Qc5RQe8TZBAQIvfXOn3kLMTOmJDVb3
+n5HUA8ZsyY/b2BzgQJhdZpmYgG4t/wHFzstGH6wCxkPmrqKEPMVOHj1tyRRM4y5B
+u8o5vzY8KhmqQYdOpc5LMnndkEl/AgMBAAGjYzBhMB8GA1UdIwQYMBaAFJxf0Gxj
+o1+TypOYCK2Mh6UsXME3MB0GA1UdDgQWBBScX9BsY6Nfk8qTmAitjIelLFzBNzAO
+BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC
+AgEATh65isagmD9uw2nAalxJUqzLK114OMHVVISfk/CHGT0sZonrDUL8zPB1hT+L
+9IBdeeUXZ701guLyPI59WzbLWoAAKfLOKyzxj6ptBZNscsdW699QIyjlRRA96Gej
+rw5VD5AJYu9LWaL2U/HANeQvwSS9eS9OICI7/RogsKQOLHDtdD+4E5UGUcjohybK
+pFtqFiGS3XNgnhAY3jyB6ugYw3yJ8otQPr0R4hUDqDZ9MwFsSBXXiJCZBMXM5gf0
+vPSQ7RPi6ovDj6MzD8EpTBNO2hVWcXNyglD2mjN8orGoGjR0ZVzO0eurU+AagNjq
+OknkJjCb5RyKqKkVMoaZkgoQI1YS4PbOTOK7vtuNknMBZi9iPrJyJ0U27U1W45eZ
+/zo1PqVUSlJZS2Db7v54EX9K3BR5YLZrZAPbFYPhor72I5dQ8AkzNqdxliXzuUJ9
+2zg/LFis6ELhDtjTO0wugumDLmsx2d1Hhk9tl5EuT+IocTUW0fJz/iUrB0ckYyfI
++PbZa/wSMVYIwFNCr5zQM378BvAxRAMU8Vjq8moNqRGyg77FGr8H6lnco4g175x2
+MjxNBiLOFeXdntiP2t7SxDnlF4HPOEfrf4htWRvfn0IUrn7PqLBmZdo3r5+qPeoo
+tt7VMVgWglvquxl1AnMaykgaIZOQCo6ThKd9OyMYkomgjaw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority
+# Subject: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority
+# Label: "Certum EC-384 CA"
+# Serial: 160250656287871593594747141429395092468
+# MD5 Fingerprint: b6:65:b3:96:60:97:12:a1:ec:4e:e1:3d:a3:c6:c9:f1
+# SHA1 Fingerprint: f3:3e:78:3c:ac:df:f4:a2:cc:ac:67:55:69:56:d7:e5:16:3c:e1:ed
+# SHA256 Fingerprint: 6b:32:80:85:62:53:18:aa:50:d1:73:c9:8d:8b:da:09:d5:7e:27:41:3d:11:4c:f7:87:a0:f5:d0:6c:03:0c:f6
+-----BEGIN CERTIFICATE-----
+MIICZTCCAeugAwIBAgIQeI8nXIESUiClBNAt3bpz9DAKBggqhkjOPQQDAzB0MQsw
+CQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEuMScw
+JQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAXBgNVBAMT
+EENlcnR1bSBFQy0zODQgQ0EwHhcNMTgwMzI2MDcyNDU0WhcNNDMwMzI2MDcyNDU0
+WjB0MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBT
+LkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAX
+BgNVBAMTEENlcnR1bSBFQy0zODQgQ0EwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATE
+KI6rGFtqvm5kN2PkzeyrOvfMobgOgknXhimfoZTy42B4mIF4Bk3y7JoOV2CDn7Tm
+Fy8as10CW4kjPMIRBSqniBMY81CE1700LCeJVf/OTOffph8oxPBUw7l8t1Ot68Kj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI0GZnQkdjrzife81r1HfS+8
+EF9LMA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNoADBlAjADVS2m5hjEfO/J
+UG7BJw+ch69u1RsIGL2SKcHvlJF40jocVYli5RsJHrpka/F2tNQCMQC0QoSZ/6vn
+nvuRlydd3LBbMHHOXjgaatkl5+r3YZJW+OraNsKHZZYuciUvf9/DE8k=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority
+# Subject: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority
+# Label: "Certum Trusted Root CA"
+# Serial: 40870380103424195783807378461123655149
+# MD5 Fingerprint: 51:e1:c2:e7:fe:4c:84:af:59:0e:2f:f4:54:6f:ea:29
+# SHA1 Fingerprint: c8:83:44:c0:18:ae:9f:cc:f1:87:b7:8f:22:d1:c5:d7:45:84:ba:e5
+# SHA256 Fingerprint: fe:76:96:57:38:55:77:3e:37:a9:5e:7a:d4:d9:cc:96:c3:01:57:c1:5d:31:76:5b:a9:b1:57:04:e1:ae:78:fd
+-----BEGIN CERTIFICATE-----
+MIIFwDCCA6igAwIBAgIQHr9ZULjJgDdMBvfrVU+17TANBgkqhkiG9w0BAQ0FADB6
+MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEu
+MScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxHzAdBgNV
+BAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwHhcNMTgwMzE2MTIxMDEzWhcNNDMw
+MzE2MTIxMDEzWjB6MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEg
+U3lzdGVtcyBTLkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRo
+b3JpdHkxHzAdBgNVBAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQDRLY67tzbqbTeRn06TpwXkKQMlzhyC93yZ
+n0EGze2jusDbCSzBfN8pfktlL5On1AFrAygYo9idBcEq2EXxkd7fO9CAAozPOA/q
+p1x4EaTByIVcJdPTsuclzxFUl6s1wB52HO8AU5853BSlLCIls3Jy/I2z5T4IHhQq
+NwuIPMqw9MjCoa68wb4pZ1Xi/K1ZXP69VyywkI3C7Te2fJmItdUDmj0VDT06qKhF
+8JVOJVkdzZhpu9PMMsmN74H+rX2Ju7pgE8pllWeg8xn2A1bUatMn4qGtg/BKEiJ3
+HAVz4hlxQsDsdUaakFjgao4rpUYwBI4Zshfjvqm6f1bxJAPXsiEodg42MEx51UGa
+mqi4NboMOvJEGyCI98Ul1z3G4z5D3Yf+xOr1Uz5MZf87Sst4WmsXXw3Hw09Omiqi
+7VdNIuJGmj8PkTQkfVXjjJU30xrwCSss0smNtA0Aq2cpKNgB9RkEth2+dv5yXMSF
+ytKAQd8FqKPVhJBPC/PgP5sZ0jeJP/J7UhyM9uH3PAeXjA6iWYEMspA90+NZRu0P
+qafegGtaqge2Gcu8V/OXIXoMsSt0Puvap2ctTMSYnjYJdmZm/Bo/6khUHL4wvYBQ
+v3y1zgD2DGHZ5yQD4OMBgQ692IU0iL2yNqh7XAjlRICMb/gv1SHKHRzQ+8S1h9E6
+Tsd2tTVItQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSM+xx1
+vALTn04uSNn5YFSqxLNP+jAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQENBQAD
+ggIBAEii1QALLtA/vBzVtVRJHlpr9OTy4EA34MwUe7nJ+jW1dReTagVphZzNTxl4
+WxmB82M+w85bj/UvXgF2Ez8sALnNllI5SW0ETsXpD4YN4fqzX4IS8TrOZgYkNCvo
+zMrnadyHncI013nR03e4qllY/p0m+jiGPp2Kh2RX5Rc64vmNueMzeMGQ2Ljdt4NR
+5MTMI9UGfOZR0800McD2RrsLrfw9EAUqO0qRJe6M1ISHgCq8CYyqOhNf6DR5UMEQ
+GfnTKB7U0VEwKbOukGfWHwpjscWpxkIxYxeU72nLL/qMFH3EQxiJ2fAyQOaA4kZf
+5ePBAFmo+eggvIksDkc0C+pXwlM2/KfUrzHN/gLldfq5Jwn58/U7yn2fqSLLiMmq
+0Uc9NneoWWRrJ8/vJ8HjJLWG965+Mk2weWjROeiQWMODvA8s1pfrzgzhIMfatz7D
+P78v3DSk+yshzWePS/Tj6tQ/50+6uaWTRRxmHyH6ZF5v4HaUMst19W7l9o/HuKTM
+qJZ9ZPskWkoDbGs4xugDQ5r3V7mzKWmTOPQD8rv7gmsHINFSH5pkAnuYZttcTVoP
+0ISVoDwUQwbKytu4QTbaakRnh6+v40URFWkIsr4WOZckbxJF0WddCajJFdr60qZf
+E2Efv4WstK2tBZQIgx51F9NxO5NQI1mg7TyRVJ12AMXDuDjb
+-----END CERTIFICATE-----
+
+# Issuer: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique
+# Subject: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique
+# Label: "TunTrust Root CA"
+# Serial: 108534058042236574382096126452369648152337120275
+# MD5 Fingerprint: 85:13:b9:90:5b:36:5c:b6:5e:b8:5a:f8:e0:31:57:b4
+# SHA1 Fingerprint: cf:e9:70:84:0f:e0:73:0f:9d:f6:0c:7f:2c:4b:ee:20:46:34:9c:bb
+# SHA256 Fingerprint: 2e:44:10:2a:b5:8c:b8:54:19:45:1c:8e:19:d9:ac:f3:66:2c:af:bc:61:4b:6a:53:96:0a:30:f7:d0:e2:eb:41
+-----BEGIN CERTIFICATE-----
+MIIFszCCA5ugAwIBAgIUEwLV4kBMkkaGFmddtLu7sms+/BMwDQYJKoZIhvcNAQEL
+BQAwYTELMAkGA1UEBhMCVE4xNzA1BgNVBAoMLkFnZW5jZSBOYXRpb25hbGUgZGUg
+Q2VydGlmaWNhdGlvbiBFbGVjdHJvbmlxdWUxGTAXBgNVBAMMEFR1blRydXN0IFJv
+b3QgQ0EwHhcNMTkwNDI2MDg1NzU2WhcNNDQwNDI2MDg1NzU2WjBhMQswCQYDVQQG
+EwJUTjE3MDUGA1UECgwuQWdlbmNlIE5hdGlvbmFsZSBkZSBDZXJ0aWZpY2F0aW9u
+IEVsZWN0cm9uaXF1ZTEZMBcGA1UEAwwQVHVuVHJ1c3QgUm9vdCBDQTCCAiIwDQYJ
+KoZIhvcNAQEBBQADggIPADCCAgoCggIBAMPN0/y9BFPdDCA61YguBUtB9YOCfvdZ
+n56eY+hz2vYGqU8ftPkLHzmMmiDQfgbU7DTZhrx1W4eI8NLZ1KMKsmwb60ksPqxd
+2JQDoOw05TDENX37Jk0bbjBU2PWARZw5rZzJJQRNmpA+TkBuimvNKWfGzC3gdOgF
+VwpIUPp6Q9p+7FuaDmJ2/uqdHYVy7BG7NegfJ7/Boce7SBbdVtfMTqDhuazb1YMZ
+GoXRlJfXyqNlC/M4+QKu3fZnz8k/9YosRxqZbwUN/dAdgjH8KcwAWJeRTIAAHDOF
+li/LQcKLEITDCSSJH7UP2dl3RxiSlGBcx5kDPP73lad9UKGAwqmDrViWVSHbhlnU
+r8a83YFuB9tgYv7sEG7aaAH0gxupPqJbI9dkxt/con3YS7qC0lH4Zr8GRuR5KiY2
+eY8fTpkdso8MDhz/yV3A/ZAQprE38806JG60hZC/gLkMjNWb1sjxVj8agIl6qeIb
+MlEsPvLfe/ZdeikZjuXIvTZxi11Mwh0/rViizz1wTaZQmCXcI/m4WEEIcb9PuISg
+jwBUFfyRbVinljvrS5YnzWuioYasDXxU5mZMZl+QviGaAkYt5IPCgLnPSz7ofzwB
+7I9ezX/SKEIBlYrilz0QIX32nRzFNKHsLA4KUiwSVXAkPcvCFDVDXSdOvsC9qnyW
+5/yeYa1E0wCXAgMBAAGjYzBhMB0GA1UdDgQWBBQGmpsfU33x9aTI04Y+oXNZtPdE
+ITAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFAaamx9TffH1pMjThj6hc1m0
+90QhMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAqgVutt0Vyb+z
+xiD2BkewhpMl0425yAA/l/VSJ4hxyXT968pk21vvHl26v9Hr7lxpuhbI87mP0zYu
+QEkHDVneixCwSQXi/5E/S7fdAo74gShczNxtr18UnH1YeA32gAm56Q6XKRm4t+v4
+FstVEuTGfbvE7Pi1HE4+Z7/FXxttbUcoqgRYYdZ2vyJ/0Adqp2RT8JeNnYA/u8EH
+22Wv5psymsNUk8QcCMNE+3tjEUPRahphanltkE8pjkcFwRJpadbGNjHh/PqAulxP
+xOu3Mqz4dWEX1xAZufHSCe96Qp1bWgvUxpVOKs7/B9dPfhgGiPEZtdmYu65xxBzn
+dFlY7wyJz4sfdZMaBBSSSFCp61cpABbjNhzI+L/wM9VBD8TMPN3pM0MBkRArHtG5
+Xc0yGYuPjCB31yLEQtyEFpslbei0VXF/sHyz03FJuc9SpAQ/3D2gu68zngowYI7b
+nV2UqL1g52KAdoGDDIzMMEZJ4gzSqK/rYXHv5yJiqfdcZGyfFoxnNidF9Ql7v/YQ
+CvGwjVRDjAS6oz/v4jXH+XTgbzRB0L9zZVcg+ZtnemZoJE6AZb0QmQZZ8mWvuMZH
+u/2QeItBcy6vVR/cO5JyboTT0GFMDcx2V+IthSIVNg3rAZ3r2OvEhJn7wAzMMujj
+d9qDRIueVSjAi1jTkD5OGwDxFa2DK5o=
+-----END CERTIFICATE-----
+
+# Issuer: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA
+# Subject: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA
+# Label: "HARICA TLS RSA Root CA 2021"
+# Serial: 76817823531813593706434026085292783742
+# MD5 Fingerprint: 65:47:9b:58:86:dd:2c:f0:fc:a2:84:1f:1e:96:c4:91
+# SHA1 Fingerprint: 02:2d:05:82:fa:88:ce:14:0c:06:79:de:7f:14:10:e9:45:d7:a5:6d
+# SHA256 Fingerprint: d9:5d:0e:8e:da:79:52:5b:f9:be:b1:1b:14:d2:10:0d:32:94:98:5f:0c:62:d9:fa:bd:9c:d9:99:ec:cb:7b:1d
+-----BEGIN CERTIFICATE-----
+MIIFpDCCA4ygAwIBAgIQOcqTHO9D88aOk8f0ZIk4fjANBgkqhkiG9w0BAQsFADBs
+MQswCQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl
+c2VhcmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBSU0Eg
+Um9vdCBDQSAyMDIxMB4XDTIxMDIxOTEwNTUzOFoXDTQ1MDIxMzEwNTUzN1owbDEL
+MAkGA1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNl
+YXJjaCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgUlNBIFJv
+b3QgQ0EgMjAyMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAIvC569l
+mwVnlskNJLnQDmT8zuIkGCyEf3dRywQRNrhe7Wlxp57kJQmXZ8FHws+RFjZiPTgE
+4VGC/6zStGndLuwRo0Xua2s7TL+MjaQenRG56Tj5eg4MmOIjHdFOY9TnuEFE+2uv
+a9of08WRiFukiZLRgeaMOVig1mlDqa2YUlhu2wr7a89o+uOkXjpFc5gH6l8Cct4M
+pbOfrqkdtx2z/IpZ525yZa31MJQjB/OCFks1mJxTuy/K5FrZx40d/JiZ+yykgmvw
+Kh+OC19xXFyuQnspiYHLA6OZyoieC0AJQTPb5lh6/a6ZcMBaD9YThnEvdmn8kN3b
+LW7R8pv1GmuebxWMevBLKKAiOIAkbDakO/IwkfN4E8/BPzWr8R0RI7VDIp4BkrcY
+AuUR0YLbFQDMYTfBKnya4dC6s1BG7oKsnTH4+yPiAwBIcKMJJnkVU2DzOFytOOqB
+AGMUuTNe3QvboEUHGjMJ+E20pwKmafTCWQWIZYVWrkvL4N48fS0ayOn7H6NhStYq
+E613TBoYm5EPWNgGVMWX+Ko/IIqmhaZ39qb8HOLubpQzKoNQhArlT4b4UEV4AIHr
+W2jjJo3Me1xR9BQsQL4aYB16cmEdH2MtiKrOokWQCPxrvrNQKlr9qEgYRtaQQJKQ
+CoReaDH46+0N0x3GfZkYVVYnZS6NRcUk7M7jAgMBAAGjQjBAMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFApII6ZgpJIKM+qTW8VX6iVNvRLuMA4GA1UdDwEB/wQE
+AwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAPpBIqm5iFSVmewzVjIuJndftTgfvnNAU
+X15QvWiWkKQUEapobQk1OUAJ2vQJLDSle1mESSmXdMgHHkdt8s4cUCbjnj1AUz/3
+f5Z2EMVGpdAgS1D0NTsY9FVqQRtHBmg8uwkIYtlfVUKqrFOFrJVWNlar5AWMxaja
+H6NpvVMPxP/cyuN+8kyIhkdGGvMA9YCRotxDQpSbIPDRzbLrLFPCU3hKTwSUQZqP
+JzLB5UkZv/HywouoCjkxKLR9YjYsTewfM7Z+d21+UPCfDtcRj88YxeMn/ibvBZ3P
+zzfF0HvaO7AWhAw6k9a+F9sPPg4ZeAnHqQJyIkv3N3a6dcSFA1pj1bF1BcK5vZSt
+jBWZp5N99sXzqnTPBIWUmAD04vnKJGW/4GKvyMX6ssmeVkjaef2WdhW+o45WxLM0
+/L5H9MG0qPzVMIho7suuyWPEdr6sOBjhXlzPrjoiUevRi7PzKzMHVIf6tLITe7pT
+BGIBnfHAT+7hOtSLIBD6Alfm78ELt5BGnBkpjNxvoEppaZS3JGWg/6w/zgH7IS79
+aPib8qXPMThcFarmlwDB31qlpzmq6YR/PFGoOtmUW4y/Twhx5duoXNTSpv4Ao8YW
+xw/ogM4cKGR0GQjTQuPOAF1/sdwTsOEFy9EgqoZ0njnnkf3/W9b3raYvAwtt41dU
+63ZTGI0RmLo=
+-----END CERTIFICATE-----
+
+# Issuer: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA
+# Subject: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA
+# Label: "HARICA TLS ECC Root CA 2021"
+# Serial: 137515985548005187474074462014555733966
+# MD5 Fingerprint: ae:f7:4c:e5:66:35:d1:b7:9b:8c:22:93:74:d3:4b:b0
+# SHA1 Fingerprint: bc:b0:c1:9d:e9:98:92:70:19:38:57:e9:8d:a7:b4:5d:6e:ee:01:48
+# SHA256 Fingerprint: 3f:99:cc:47:4a:cf:ce:4d:fe:d5:87:94:66:5e:47:8d:15:47:73:9f:2e:78:0f:1b:b4:ca:9b:13:30:97:d4:01
+-----BEGIN CERTIFICATE-----
+MIICVDCCAdugAwIBAgIQZ3SdjXfYO2rbIvT/WeK/zjAKBggqhkjOPQQDAzBsMQsw
+CQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2Vh
+cmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBFQ0MgUm9v
+dCBDQSAyMDIxMB4XDTIxMDIxOTExMDExMFoXDTQ1MDIxMzExMDEwOVowbDELMAkG
+A1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJj
+aCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgRUNDIFJvb3Qg
+Q0EgMjAyMTB2MBAGByqGSM49AgEGBSuBBAAiA2IABDgI/rGgltJ6rK9JOtDA4MM7
+KKrxcm1lAEeIhPyaJmuqS7psBAqIXhfyVYf8MLA04jRYVxqEU+kw2anylnTDUR9Y
+STHMmE5gEYd103KUkE+bECUqqHgtvpBBWJAVcqeht6NCMEAwDwYDVR0TAQH/BAUw
+AwEB/zAdBgNVHQ4EFgQUyRtTgRL+BNUW0aq8mm+3oJUZbsowDgYDVR0PAQH/BAQD
+AgGGMAoGCCqGSM49BAMDA2cAMGQCMBHervjcToiwqfAircJRQO9gcS3ujwLEXQNw
+SaSS6sUUiHCm0w2wqsosQJz76YJumgIwK0eaB8bRwoF8yguWGEEbo/QwCZ61IygN
+nxS2PFOiTAZpffpskcYqSUXm7LcT4Tps
+-----END CERTIFICATE-----
+
+# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
+# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
+# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068"
+# Serial: 1977337328857672817
+# MD5 Fingerprint: 4e:6e:9b:54:4c:ca:b7:fa:48:e4:90:b1:15:4b:1c:a3
+# SHA1 Fingerprint: 0b:be:c2:27:22:49:cb:39:aa:db:35:5c:53:e3:8c:ae:78:ff:b6:fe
+# SHA256 Fingerprint: 57:de:05:83:ef:d2:b2:6e:03:61:da:99:da:9d:f4:64:8d:ef:7e:e8:44:1c:3b:72:8a:fa:9b:cd:e0:f9:b2:6a
+-----BEGIN CERTIFICATE-----
+MIIGFDCCA/ygAwIBAgIIG3Dp0v+ubHEwDQYJKoZIhvcNAQELBQAwUTELMAkGA1UE
+BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h
+cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0xNDA5MjMxNTIyMDdaFw0zNjA1
+MDUxNTIyMDdaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg
+Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9
+thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM
+cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG
+L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i
+NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h
+X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b
+m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy
+Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja
+EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T
+KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF
+6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh
+OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMB0GA1UdDgQWBBRlzeurNR4APn7VdMAc
+tHNHDhpkLzASBgNVHRMBAf8ECDAGAQH/AgEBMIGmBgNVHSAEgZ4wgZswgZgGBFUd
+IAAwgY8wLwYIKwYBBQUHAgEWI2h0dHA6Ly93d3cuZmlybWFwcm9mZXNpb25hbC5j
+b20vY3BzMFwGCCsGAQUFBwICMFAeTgBQAGEAcwBlAG8AIABkAGUAIABsAGEAIABC
+AG8AbgBhAG4AbwB2AGEAIAA0ADcAIABCAGEAcgBjAGUAbABvAG4AYQAgADAAOAAw
+ADEANzAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQELBQADggIBAHSHKAIrdx9m
+iWTtj3QuRhy7qPj4Cx2Dtjqn6EWKB7fgPiDL4QjbEwj4KKE1soCzC1HA01aajTNF
+Sa9J8OA9B3pFE1r/yJfY0xgsfZb43aJlQ3CTkBW6kN/oGbDbLIpgD7dvlAceHabJ
+hfa9NPhAeGIQcDq+fUs5gakQ1JZBu/hfHAsdCPKxsIl68veg4MSPi3i1O1ilI45P
+Vf42O+AMt8oqMEEgtIDNrvx2ZnOorm7hfNoD6JQg5iKj0B+QXSBTFCZX2lSX3xZE
+EAEeiGaPcjiT3SC3NL7X8e5jjkd5KAb881lFJWAiMxujX6i6KtoaPc1A6ozuBRWV
+1aUsIC+nmCjuRfzxuIgALI9C2lHVnOUTaHFFQ4ueCyE8S1wF3BqfmI7avSKecs2t
+CsvMo2ebKHTEm9caPARYpoKdrcd7b/+Alun4jWq9GJAd/0kakFI3ky88Al2CdgtR
+5xbHV/g4+afNmyJU72OwFW1TZQNKXkqgsqeOSQBZONXH9IBk9W6VULgRfhVwOEqw
+f9DEMnDAGf/JOC0ULGb0QkTmVXYbgBVX/8Cnp6o5qtjTcNAuuuuUavpfNIbnYrX9
+ivAwhZTJryQCL2/W3Wf+47BVTwSYT6RBVuKT0Gro1vP7ZeDOdcQxWQzugsgMYDNK
+GbqEZycPvEJdvSRUDewdcAZfpLz6IHxV
+-----END CERTIFICATE-----
+
+# Issuer: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd.
+# Subject: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd.
+# Label: "vTrus ECC Root CA"
+# Serial: 630369271402956006249506845124680065938238527194
+# MD5 Fingerprint: de:4b:c1:f5:52:8c:9b:43:e1:3e:8f:55:54:17:8d:85
+# SHA1 Fingerprint: f6:9c:db:b0:fc:f6:02:13:b6:52:32:a6:a3:91:3f:16:70:da:c3:e1
+# SHA256 Fingerprint: 30:fb:ba:2c:32:23:8e:2a:98:54:7a:f9:79:31:e5:50:42:8b:9b:3f:1c:8e:eb:66:33:dc:fa:86:c5:b2:7d:d3
+-----BEGIN CERTIFICATE-----
+MIICDzCCAZWgAwIBAgIUbmq8WapTvpg5Z6LSa6Q75m0c1towCgYIKoZIzj0EAwMw
+RzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4xGjAY
+BgNVBAMTEXZUcnVzIEVDQyBSb290IENBMB4XDTE4MDczMTA3MjY0NFoXDTQzMDcz
+MTA3MjY0NFowRzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28u
+LEx0ZC4xGjAYBgNVBAMTEXZUcnVzIEVDQyBSb290IENBMHYwEAYHKoZIzj0CAQYF
+K4EEACIDYgAEZVBKrox5lkqqHAjDo6LN/llWQXf9JpRCux3NCNtzslt188+cToL0
+v/hhJoVs1oVbcnDS/dtitN9Ti72xRFhiQgnH+n9bEOf+QP3A2MMrMudwpremIFUd
+e4BdS49nTPEQo0IwQDAdBgNVHQ4EFgQUmDnNvtiyjPeyq+GtJK97fKHbH88wDwYD
+VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwCgYIKoZIzj0EAwMDaAAwZQIw
+V53dVvHH4+m4SVBrm2nDb+zDfSXkV5UTQJtS0zvzQBm8JsctBp61ezaf9SXUY2sA
+AjEA6dPGnlaaKsyh2j/IZivTWJwghfqrkYpwcBE4YGQLYgmRWAD5Tfs0aNoJrSEG
+GJTO
+-----END CERTIFICATE-----
+
+# Issuer: CN=vTrus Root CA O=iTrusChina Co.,Ltd.
+# Subject: CN=vTrus Root CA O=iTrusChina Co.,Ltd.
+# Label: "vTrus Root CA"
+# Serial: 387574501246983434957692974888460947164905180485
+# MD5 Fingerprint: b8:c9:37:df:fa:6b:31:84:64:c5:ea:11:6a:1b:75:fc
+# SHA1 Fingerprint: 84:1a:69:fb:f5:cd:1a:25:34:13:3d:e3:f8:fc:b8:99:d0:c9:14:b7
+# SHA256 Fingerprint: 8a:71:de:65:59:33:6f:42:6c:26:e5:38:80:d0:0d:88:a1:8d:a4:c6:a9:1f:0d:cb:61:94:e2:06:c5:c9:63:87
+-----BEGIN CERTIFICATE-----
+MIIFVjCCAz6gAwIBAgIUQ+NxE9izWRRdt86M/TX9b7wFjUUwDQYJKoZIhvcNAQEL
+BQAwQzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4x
+FjAUBgNVBAMTDXZUcnVzIFJvb3QgQ0EwHhcNMTgwNzMxMDcyNDA1WhcNNDMwNzMx
+MDcyNDA1WjBDMQswCQYDVQQGEwJDTjEcMBoGA1UEChMTaVRydXNDaGluYSBDby4s
+THRkLjEWMBQGA1UEAxMNdlRydXMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQAD
+ggIPADCCAgoCggIBAL1VfGHTuB0EYgWgrmy3cLRB6ksDXhA/kFocizuwZotsSKYc
+IrrVQJLuM7IjWcmOvFjai57QGfIvWcaMY1q6n6MLsLOaXLoRuBLpDLvPbmyAhykU
+AyyNJJrIZIO1aqwTLDPxn9wsYTwaP3BVm60AUn/PBLn+NvqcwBauYv6WTEN+VRS+
+GrPSbcKvdmaVayqwlHeFXgQPYh1jdfdr58tbmnDsPmcF8P4HCIDPKNsFxhQnL4Z9
+8Cfe/+Z+M0jnCx5Y0ScrUw5XSmXX+6KAYPxMvDVTAWqXcoKv8R1w6Jz1717CbMdH
+flqUhSZNO7rrTOiwCcJlwp2dCZtOtZcFrPUGoPc2BX70kLJrxLT5ZOrpGgrIDajt
+J8nU57O5q4IikCc9Kuh8kO+8T/3iCiSn3mUkpF3qwHYw03dQ+A0Em5Q2AXPKBlim
+0zvc+gRGE1WKyURHuFE5Gi7oNOJ5y1lKCn+8pu8fA2dqWSslYpPZUxlmPCdiKYZN
+pGvu/9ROutW04o5IWgAZCfEF2c6Rsffr6TlP9m8EQ5pV9T4FFL2/s1m02I4zhKOQ
+UqqzApVg+QxMaPnu1RcN+HFXtSXkKe5lXa/R7jwXC1pDxaWG6iSe4gUH3DRCEpHW
+OXSuTEGC2/KmSNGzm/MzqvOmwMVO9fSddmPmAsYiS8GVP1BkLFTltvA8Kc9XAgMB
+AAGjQjBAMB0GA1UdDgQWBBRUYnBj8XWEQ1iO0RYgscasGrz2iTAPBgNVHRMBAf8E
+BTADAQH/MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAKbqSSaet
+8PFww+SX8J+pJdVrnjT+5hpk9jprUrIQeBqfTNqK2uwcN1LgQkv7bHbKJAs5EhWd
+nxEt/Hlk3ODg9d3gV8mlsnZwUKT+twpw1aA08XXXTUm6EdGz2OyC/+sOxL9kLX1j
+bhd47F18iMjrjld22VkE+rxSH0Ws8HqA7Oxvdq6R2xCOBNyS36D25q5J08FsEhvM
+Kar5CKXiNxTKsbhm7xqC5PD48acWabfbqWE8n/Uxy+QARsIvdLGx14HuqCaVvIiv
+TDUHKgLKeBRtRytAVunLKmChZwOgzoy8sHJnxDHO2zTlJQNgJXtxmOTAGytfdELS
+S8VZCAeHvsXDf+eW2eHcKJfWjwXj9ZtOyh1QRwVTsMo554WgicEFOwE30z9J4nfr
+I8iIZjs9OXYhRvHsXyO466JmdXTBQPfYaJqT4i2pLr0cox7IdMakLXogqzu4sEb9
+b91fUlV1YvCXoHzXOP0l382gmxDPi7g4Xl7FtKYCNqEeXxzP4padKar9mK5S4fNB
+UvupLnKWnyfjqnN9+BojZns7q2WwMgFLFT49ok8MKzWixtlnEjUwzXYuFrOZnk1P
+Ti07NEPhmg4NpGaXutIcSkwsKouLgU9xGqndXHt7CMUADTdA43x7VF8vhV929ven
+sBxXVsFy6K2ir40zSbofitzmdHxghm+Hl3s=
+-----END CERTIFICATE-----
+
+# Issuer: CN=ISRG Root X2 O=Internet Security Research Group
+# Subject: CN=ISRG Root X2 O=Internet Security Research Group
+# Label: "ISRG Root X2"
+# Serial: 87493402998870891108772069816698636114
+# MD5 Fingerprint: d3:9e:c4:1e:23:3c:a6:df:cf:a3:7e:6d:e0:14:e6:e5
+# SHA1 Fingerprint: bd:b1:b9:3c:d5:97:8d:45:c6:26:14:55:f8:db:95:c7:5a:d1:53:af
+# SHA256 Fingerprint: 69:72:9b:8e:15:a8:6e:fc:17:7a:57:af:b7:17:1d:fc:64:ad:d2:8c:2f:ca:8c:f1:50:7e:34:45:3c:cb:14:70
+-----BEGIN CERTIFICATE-----
+MIICGzCCAaGgAwIBAgIQQdKd0XLq7qeAwSxs6S+HUjAKBggqhkjOPQQDAzBPMQsw
+CQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJuZXQgU2VjdXJpdHkgUmVzZWFyY2gg
+R3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBYMjAeFw0yMDA5MDQwMDAwMDBaFw00
+MDA5MTcxNjAwMDBaME8xCzAJBgNVBAYTAlVTMSkwJwYDVQQKEyBJbnRlcm5ldCBT
+ZWN1cml0eSBSZXNlYXJjaCBHcm91cDEVMBMGA1UEAxMMSVNSRyBSb290IFgyMHYw
+EAYHKoZIzj0CAQYFK4EEACIDYgAEzZvVn4CDCuwJSvMWSj5cz3es3mcFDR0HttwW
++1qLFNvicWDEukWVEYmO6gbf9yoWHKS5xcUy4APgHoIYOIvXRdgKam7mAHf7AlF9
+ItgKbppbd9/w+kHsOdx1ymgHDB/qo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0T
+AQH/BAUwAwEB/zAdBgNVHQ4EFgQUfEKWrt5LSDv6kviejM9ti6lyN5UwCgYIKoZI
+zj0EAwMDaAAwZQIwe3lORlCEwkSHRhtFcP9Ymd70/aTSVaYgLXTWNLxBo1BfASdW
+tL4ndQavEi51mI38AjEAi/V3bNTIZargCyzuFJ0nN6T5U6VR5CmD1/iQMVtCnwr1
+/q4AaOeMSQ+2b1tbFfLn
+-----END CERTIFICATE-----
+
+# Issuer: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd.
+# Subject: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd.
+# Label: "HiPKI Root CA - G1"
+# Serial: 60966262342023497858655262305426234976
+# MD5 Fingerprint: 69:45:df:16:65:4b:e8:68:9a:8f:76:5f:ff:80:9e:d3
+# SHA1 Fingerprint: 6a:92:e4:a8:ee:1b:ec:96:45:37:e3:29:57:49:cd:96:e3:e5:d2:60
+# SHA256 Fingerprint: f0:15:ce:3c:c2:39:bf:ef:06:4b:e9:f1:d2:c4:17:e1:a0:26:4a:0a:94:be:1f:0c:8d:12:18:64:eb:69:49:cc
+-----BEGIN CERTIFICATE-----
+MIIFajCCA1KgAwIBAgIQLd2szmKXlKFD6LDNdmpeYDANBgkqhkiG9w0BAQsFADBP
+MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0
+ZC4xGzAZBgNVBAMMEkhpUEtJIFJvb3QgQ0EgLSBHMTAeFw0xOTAyMjIwOTQ2MDRa
+Fw0zNzEyMzExNTU5NTlaME8xCzAJBgNVBAYTAlRXMSMwIQYDVQQKDBpDaHVuZ2h3
+YSBUZWxlY29tIENvLiwgTHRkLjEbMBkGA1UEAwwSSGlQS0kgUm9vdCBDQSAtIEcx
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA9B5/UnMyDHPkvRN0o9Qw
+qNCuS9i233VHZvR85zkEHmpwINJaR3JnVfSl6J3VHiGh8Ge6zCFovkRTv4354twv
+Vcg3Px+kwJyz5HdcoEb+d/oaoDjq7Zpy3iu9lFc6uux55199QmQ5eiY29yTw1S+6
+lZgRZq2XNdZ1AYDgr/SEYYwNHl98h5ZeQa/rh+r4XfEuiAU+TCK72h8q3VJGZDnz
+Qs7ZngyzsHeXZJzA9KMuH5UHsBffMNsAGJZMoYFL3QRtU6M9/Aes1MU3guvklQgZ
+KILSQjqj2FPseYlgSGDIcpJQ3AOPgz+yQlda22rpEZfdhSi8MEyr48KxRURHH+CK
+FgeW0iEPU8DtqX7UTuybCeyvQqww1r/REEXgphaypcXTT3OUM3ECoWqj1jOXTyFj
+HluP2cFeRXF3D4FdXyGarYPM+l7WjSNfGz1BryB1ZlpK9p/7qxj3ccC2HTHsOyDr
+y+K49a6SsvfhhEvyovKTmiKe0xRvNlS9H15ZFblzqMF8b3ti6RZsR1pl8w4Rm0bZ
+/W3c1pzAtH2lsN0/Vm+h+fbkEkj9Bn8SV7apI09bA8PgcSojt/ewsTu8mL3WmKgM
+a/aOEmem8rJY5AIJEzypuxC00jBF8ez3ABHfZfjcK0NVvxaXxA/VLGGEqnKG/uY6
+fsI/fe78LxQ+5oXdUG+3Se0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
+HQ4EFgQU8ncX+l6o/vY9cdVouslGDDjYr7AwDgYDVR0PAQH/BAQDAgGGMA0GCSqG
+SIb3DQEBCwUAA4ICAQBQUfB13HAE4/+qddRxosuej6ip0691x1TPOhwEmSKsxBHi
+7zNKpiMdDg1H2DfHb680f0+BazVP6XKlMeJ45/dOlBhbQH3PayFUhuaVevvGyuqc
+SE5XCV0vrPSltJczWNWseanMX/mF+lLFjfiRFOs6DRfQUsJ748JzjkZ4Bjgs6Fza
+ZsT0pPBWGTMpWmWSBUdGSquEwx4noR8RkpkndZMPvDY7l1ePJlsMu5wP1G4wB9Tc
+XzZoZjmDlicmisjEOf6aIW/Vcobpf2Lll07QJNBAsNB1CI69aO4I1258EHBGG3zg
+iLKecoaZAeO/n0kZtCW+VmWuF2PlHt/o/0elv+EmBYTksMCv5wiZqAxeJoBF1Pho
+L5aPruJKHJwWDBNvOIf2u8g0X5IDUXlwpt/L9ZlNec1OvFefQ05rLisY+GpzjLrF
+Ne85akEez3GoorKGB1s6yeHvP2UEgEcyRHCVTjFnanRbEEV16rCf0OY1/k6fi8wr
+kkVbbiVghUbN0aqwdmaTd5a+g744tiROJgvM7XpWGuDpWsZkrUx6AEhEL7lAuxM+
+vhV4nYWBSipX3tUZQ9rbyltHhoMLP7YNdnhzeSJesYAfz77RP1YQmCuVh6EfnWQU
+YDksswBVLuT1sw5XxJFBAJw/6KXf6vb/yPCtbVKoF6ubYfwSUTXkJf2vqmqGOQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4
+# Label: "GlobalSign ECC Root CA - R4"
+# Serial: 159662223612894884239637590694
+# MD5 Fingerprint: 26:29:f8:6d:e1:88:bf:a2:65:7f:aa:c4:cd:0f:7f:fc
+# SHA1 Fingerprint: 6b:a0:b0:98:e1:71:ef:5a:ad:fe:48:15:80:77:10:f4:bd:6f:0b:28
+# SHA256 Fingerprint: b0:85:d7:0b:96:4f:19:1a:73:e4:af:0d:54:ae:7a:0e:07:aa:fd:af:9b:71:dd:08:62:13:8a:b7:32:5a:24:a2
+-----BEGIN CERTIFICATE-----
+MIIB3DCCAYOgAwIBAgINAgPlfvU/k/2lCSGypjAKBggqhkjOPQQDAjBQMSQwIgYD
+VQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0gUjQxEzARBgNVBAoTCkdsb2Jh
+bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTIxMTEzMDAwMDAwWhcNMzgw
+MTE5MDMxNDA3WjBQMSQwIgYDVQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0g
+UjQxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wWTAT
+BgcqhkjOPQIBBggqhkjOPQMBBwNCAAS4xnnTj2wlDp8uORkcA6SumuU5BwkWymOx
+uYb4ilfBV85C+nOh92VC/x7BALJucw7/xyHlGKSq2XE/qNS5zowdo0IwQDAOBgNV
+HQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUVLB7rUW44kB/
++wpu+74zyTyjhNUwCgYIKoZIzj0EAwIDRwAwRAIgIk90crlgr/HmnKAWBVBfw147
+bmF0774BxL4YSFlhgjICICadVGNA3jdgUM/I2O2dgq43mLyjj0xMqTQrbO/7lZsm
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R1 O=Google Trust Services LLC
+# Subject: CN=GTS Root R1 O=Google Trust Services LLC
+# Label: "GTS Root R1"
+# Serial: 159662320309726417404178440727
+# MD5 Fingerprint: 05:fe:d0:bf:71:a8:a3:76:63:da:01:e0:d8:52:dc:40
+# SHA1 Fingerprint: e5:8c:1c:c4:91:3b:38:63:4b:e9:10:6e:e3:ad:8e:6b:9d:d9:81:4a
+# SHA256 Fingerprint: d9:47:43:2a:bd:e7:b7:fa:90:fc:2e:6b:59:10:1b:12:80:e0:e1:c7:e4:e4:0f:a3:c6:88:7f:ff:57:a7:f4:cf
+-----BEGIN CERTIFICATE-----
+MIIFVzCCAz+gAwIBAgINAgPlk28xsBNJiGuiFzANBgkqhkiG9w0BAQwFADBHMQsw
+CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU
+MBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw
+MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp
+Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEBAQUA
+A4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaMf/vo
+27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vXmX7w
+Cl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7zUjw
+TcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0Pfybl
+qAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtcvfaH
+szVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4Zor8
+Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUspzBmk
+MiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOORc92
+wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYWk70p
+aDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+DVrN
+VjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgFlQID
+AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E
+FgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBAJ+qQibb
+C5u+/x6Wki4+omVKapi6Ist9wTrYggoGxval3sBOh2Z5ofmmWJyq+bXmYOfg6LEe
+QkEzCzc9zolwFcq1JKjPa7XSQCGYzyI0zzvFIoTgxQ6KfF2I5DUkzps+GlQebtuy
+h6f88/qBVRRiClmpIgUxPoLW7ttXNLwzldMXG+gnoot7TiYaelpkttGsN/H9oPM4
+7HLwEXWdyzRSjeZ2axfG34arJ45JK3VmgRAhpuo+9K4l/3wV3s6MJT/KYnAK9y8J
+ZgfIPxz88NtFMN9iiMG1D53Dn0reWVlHxYciNuaCp+0KueIHoI17eko8cdLiA6Ef
+MgfdG+RCzgwARWGAtQsgWSl4vflVy2PFPEz0tv/bal8xa5meLMFrUKTX5hgUvYU/
+Z6tGn6D/Qqc6f1zLXbBwHSs09dR2CQzreExZBfMzQsNhFRAbd03OIozUhfJFfbdT
+6u9AWpQKXCBfTkBdYiJ23//OYb2MI3jSNwLgjt7RETeJ9r/tSQdirpLsQBqvFAnZ
+0E6yove+7u7Y/9waLd64NnHi/Hm3lCXRSHNboTXns5lndcEZOitHTtNCjv0xyBZm
+2tIMPNuzjsmhDYAPexZ3FL//2wmUspO8IFgV6dtxQ/PeEMMA3KgqlbbC1j+Qa3bb
+bP6MvPJwNQzcmRk13NfIRmPVNnGuV/u3gm3c
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R2 O=Google Trust Services LLC
+# Subject: CN=GTS Root R2 O=Google Trust Services LLC
+# Label: "GTS Root R2"
+# Serial: 159662449406622349769042896298
+# MD5 Fingerprint: 1e:39:c0:53:e6:1e:29:82:0b:ca:52:55:36:5d:57:dc
+# SHA1 Fingerprint: 9a:44:49:76:32:db:de:fa:d0:bc:fb:5a:7b:17:bd:9e:56:09:24:94
+# SHA256 Fingerprint: 8d:25:cd:97:22:9d:bf:70:35:6b:da:4e:b3:cc:73:40:31:e2:4c:f0:0f:af:cf:d3:2d:c7:6e:b5:84:1c:7e:a8
+-----BEGIN CERTIFICATE-----
+MIIFVzCCAz+gAwIBAgINAgPlrsWNBCUaqxElqjANBgkqhkiG9w0BAQwFADBHMQsw
+CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU
+MBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw
+MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp
+Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEBAQUA
+A4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3LvCvpt
+nfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3KgGjSY
+6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9BuXvAu
+MC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOdre7k
+RXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXuPuWg
+f9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1mKPV
++3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K8Yzo
+dDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqjx5RW
+Ir9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsRnTKa
+G73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0kzCq
+gc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9OktwID
+AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E
+FgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBAB/Kzt3H
+vqGf2SdMC9wXmBFqiN495nFWcrKeGk6c1SuYJF2ba3uwM4IJvd8lRuqYnrYb/oM8
+0mJhwQTtzuDFycgTE1XnqGOtjHsB/ncw4c5omwX4Eu55MaBBRTUoCnGkJE+M3DyC
+B19m3H0Q/gxhswWV7uGugQ+o+MePTagjAiZrHYNSVc61LwDKgEDg4XSsYPWHgJ2u
+NmSRXbBoGOqKYcl3qJfEycel/FVL8/B/uWU9J2jQzGv6U53hkRrJXRqWbTKH7QMg
+yALOWr7Z6v2yTcQvG99fevX4i8buMTolUVVnjWQye+mew4K6Ki3pHrTgSAai/Gev
+HyICc/sgCq+dVEuhzf9gR7A/Xe8bVr2XIZYtCtFenTgCR2y59PYjJbigapordwj6
+xLEokCZYCDzifqrXPW+6MYgKBesntaFJ7qBFVHvmJ2WZICGoo7z7GJa7Um8M7YNR
+TOlZ4iBgxcJlkoKM8xAfDoqXvneCbT+PHV28SSe9zE8P4c52hgQjxcCMElv924Sg
+JPFI/2R80L5cFtHvma3AH/vLrrw4IgYmZNralw4/KBVEqE8AyvCazM90arQ+POuV
+7LXTWtiBmelDGDfrs7vRWGJB82bSj6p4lVQgw1oudCvV0b4YacCs1aTPObpRhANl
+6WLAYv7YTVWW4tAR+kg0Eeye7QUd5MjWHYbL
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R3 O=Google Trust Services LLC
+# Subject: CN=GTS Root R3 O=Google Trust Services LLC
+# Label: "GTS Root R3"
+# Serial: 159662495401136852707857743206
+# MD5 Fingerprint: 3e:e7:9d:58:02:94:46:51:94:e5:e0:22:4a:8b:e7:73
+# SHA1 Fingerprint: ed:e5:71:80:2b:c8:92:b9:5b:83:3c:d2:32:68:3f:09:cd:a0:1e:46
+# SHA256 Fingerprint: 34:d8:a7:3e:e2:08:d9:bc:db:0d:95:65:20:93:4b:4e:40:e6:94:82:59:6e:8b:6f:73:c8:42:6b:01:0a:6f:48
+-----BEGIN CERTIFICATE-----
+MIICCTCCAY6gAwIBAgINAgPluILrIPglJ209ZjAKBggqhkjOPQQDAzBHMQswCQYD
+VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG
+A1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw
+WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz
+IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNi
+AAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout736G
+jOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2ADDL2
+4CejQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
+BBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEA9uEglRR7
+VKOQFhG/hMjqb2sXnh5GmCCbn9MN2azTL818+FsuVbu/3ZL3pAzcMeGiAjEA/Jdm
+ZuVDFhOD3cffL74UOO0BzrEXGhF16b0DjyZ+hOXJYKaV11RZt+cRLInUue4X
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R4 O=Google Trust Services LLC
+# Subject: CN=GTS Root R4 O=Google Trust Services LLC
+# Label: "GTS Root R4"
+# Serial: 159662532700760215368942768210
+# MD5 Fingerprint: 43:96:83:77:19:4d:76:b3:9d:65:52:e4:1d:22:a5:e8
+# SHA1 Fingerprint: 77:d3:03:67:b5:e0:0c:15:f6:0c:38:61:df:7c:e1:3b:92:46:4d:47
+# SHA256 Fingerprint: 34:9d:fa:40:58:c5:e2:63:12:3b:39:8a:e7:95:57:3c:4e:13:13:c8:3f:e6:8f:93:55:6c:d5:e8:03:1b:3c:7d
+-----BEGIN CERTIFICATE-----
+MIICCTCCAY6gAwIBAgINAgPlwGjvYxqccpBQUjAKBggqhkjOPQQDAzBHMQswCQYD
+VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG
+A1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw
+WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz
+IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQAIgNi
+AATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzuhXyi
+QHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/lxKvR
+HYqjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
+BBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNpADBmAjEA6ED/g94D
+9J+uHXqnLrmvT/aDHQ4thQEd0dlq7A/Cr8deVl5c1RxYIigL9zC2L7F8AjEA8GE8
+p/SgguMh1YQdc4acLa/KNJvxn7kjNuK8YAOdgLOaVsjh4rsUecrNIdSUtUlD
+-----END CERTIFICATE-----
+
+# Issuer: CN=Telia Root CA v2 O=Telia Finland Oyj
+# Subject: CN=Telia Root CA v2 O=Telia Finland Oyj
+# Label: "Telia Root CA v2"
+# Serial: 7288924052977061235122729490515358
+# MD5 Fingerprint: 0e:8f:ac:aa:82:df:85:b1:f4:dc:10:1c:fc:99:d9:48
+# SHA1 Fingerprint: b9:99:cd:d1:73:50:8a:c4:47:05:08:9c:8c:88:fb:be:a0:2b:40:cd
+# SHA256 Fingerprint: 24:2b:69:74:2f:cb:1e:5b:2a:bf:98:89:8b:94:57:21:87:54:4e:5b:4d:99:11:78:65:73:62:1f:6a:74:b8:2c
+-----BEGIN CERTIFICATE-----
+MIIFdDCCA1ygAwIBAgIPAWdfJ9b+euPkrL4JWwWeMA0GCSqGSIb3DQEBCwUAMEQx
+CzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZMBcGA1UE
+AwwQVGVsaWEgUm9vdCBDQSB2MjAeFw0xODExMjkxMTU1NTRaFw00MzExMjkxMTU1
+NTRaMEQxCzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZ
+MBcGA1UEAwwQVGVsaWEgUm9vdCBDQSB2MjCCAiIwDQYJKoZIhvcNAQEBBQADggIP
+ADCCAgoCggIBALLQPwe84nvQa5n44ndp586dpAO8gm2h/oFlH0wnrI4AuhZ76zBq
+AMCzdGh+sq/H1WKzej9Qyow2RCRj0jbpDIX2Q3bVTKFgcmfiKDOlyzG4OiIjNLh9
+vVYiQJ3q9HsDrWj8soFPmNB06o3lfc1jw6P23pLCWBnglrvFxKk9pXSW/q/5iaq9
+lRdU2HhE8Qx3FZLgmEKnpNaqIJLNwaCzlrI6hEKNfdWV5Nbb6WLEWLN5xYzTNTOD
+n3WhUidhOPFZPY5Q4L15POdslv5e2QJltI5c0BE0312/UqeBAMN/mUWZFdUXyApT
+7GPzmX3MaRKGwhfwAZ6/hLzRUssbkmbOpFPlob/E2wnW5olWK8jjfN7j/4nlNW4o
+6GwLI1GpJQXrSPjdscr6bAhR77cYbETKJuFzxokGgeWKrLDiKca5JLNrRBH0pUPC
+TEPlcDaMtjNXepUugqD0XBCzYYP2AgWGLnwtbNwDRm41k9V6lS/eINhbfpSQBGq6
+WT0EBXWdN6IOLj3rwaRSg/7Qa9RmjtzG6RJOHSpXqhC8fF6CfaamyfItufUXJ63R
+DolUK5X6wK0dmBR4M0KGCqlztft0DbcbMBnEWg4cJ7faGND/isgFuvGqHKI3t+ZI
+pEYslOqodmJHixBTB0hXbOKSTbauBcvcwUpej6w9GU7C7WB1K9vBykLVAgMBAAGj
+YzBhMB8GA1UdIwQYMBaAFHKs5DN5qkWH9v2sHZ7Wxy+G2CQ5MB0GA1UdDgQWBBRy
+rOQzeapFh/b9rB2e1scvhtgkOTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUw
+AwEB/zANBgkqhkiG9w0BAQsFAAOCAgEAoDtZpwmUPjaE0n4vOaWWl/oRrfxn83EJ
+8rKJhGdEr7nv7ZbsnGTbMjBvZ5qsfl+yqwE2foH65IRe0qw24GtixX1LDoJt0nZi
+0f6X+J8wfBj5tFJ3gh1229MdqfDBmgC9bXXYfef6xzijnHDoRnkDry5023X4blMM
+A8iZGok1GTzTyVR8qPAs5m4HeW9q4ebqkYJpCh3DflminmtGFZhb069GHWLIzoBS
+SRE/yQQSwxN8PzuKlts8oB4KtItUsiRnDe+Cy748fdHif64W1lZYudogsYMVoe+K
+TTJvQS8TUoKU1xrBeKJR3Stwbbca+few4GeXVtt8YVMJAygCQMez2P2ccGrGKMOF
+6eLtGpOg3kuYooQ+BXcBlj37tCAPnHICehIv1aO6UXivKitEZU61/Qrowc15h2Er
+3oBXRb9n8ZuRXqWk7FlIEA04x7D6w0RtBPV4UBySllva9bguulvP5fBqnUsvWHMt
+Ty3EHD70sz+rFQ47GUGKpMFXEmZxTPpT41frYpUJnlTd0cI8Vzy9OK2YZLe4A5pT
+VmBds9hCG1xLEooc6+t9xnppxyd/pPiL8uSUZodL6ZQHCRJ5irLrdATczvREWeAW
+ysUsWNc8e89ihmpQfTU2Zqf7N+cox9jQraVplI/owd8k+BsHMYeB2F326CjYSlKA
+rBPuUBQemMc=
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH
+# Subject: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH
+# Label: "D-TRUST BR Root CA 1 2020"
+# Serial: 165870826978392376648679885835942448534
+# MD5 Fingerprint: b5:aa:4b:d5:ed:f7:e3:55:2e:8f:72:0a:f3:75:b8:ed
+# SHA1 Fingerprint: 1f:5b:98:f0:e3:b5:f7:74:3c:ed:e6:b0:36:7d:32:cd:f4:09:41:67
+# SHA256 Fingerprint: e5:9a:aa:81:60:09:c2:2b:ff:5b:25:ba:d3:7d:f3:06:f0:49:79:7c:1f:81:d8:5a:b0:89:e6:57:bd:8f:00:44
+-----BEGIN CERTIFICATE-----
+MIIC2zCCAmCgAwIBAgIQfMmPK4TX3+oPyWWa00tNljAKBggqhkjOPQQDAzBIMQsw
+CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS
+VVNUIEJSIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTA5NDUwMFoXDTM1MDIxMTA5
+NDQ1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG
+A1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB
+BAAiA2IABMbLxyjR+4T1mu9CFCDhQ2tuda38KwOE1HaTJddZO0Flax7mNCq7dPYS
+zuht56vkPE4/RAiLzRZxy7+SmfSk1zxQVFKQhYN4lGdnoxwJGT11NIXe7WB9xwy0
+QVK5buXuQqOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFHOREKv/
+VbNafAkl1bK6CKBrqx9tMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g
+PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2JyX3Jvb3Rf
+Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l
+dC9DTj1ELVRSVVNUJTIwQlIlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1
+c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO
+PQQDAwNpADBmAjEAlJAtE/rhY/hhY+ithXhUkZy4kzg+GkHaQBZTQgjKL47xPoFW
+wKrY7RjEsK70PvomAjEA8yjixtsrmfu3Ubgko6SUeho/5jbiA1czijDLgsfWFBHV
+dWNbFJWcHwHP2NVypw87
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH
+# Subject: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH
+# Label: "D-TRUST EV Root CA 1 2020"
+# Serial: 126288379621884218666039612629459926992
+# MD5 Fingerprint: 8c:2d:9d:70:9f:48:99:11:06:11:fb:e9:cb:30:c0:6e
+# SHA1 Fingerprint: 61:db:8c:21:59:69:03:90:d8:7c:9c:12:86:54:cf:9d:3d:f4:dd:07
+# SHA256 Fingerprint: 08:17:0d:1a:a3:64:53:90:1a:2f:95:92:45:e3:47:db:0c:8d:37:ab:aa:bc:56:b8:1a:a1:00:dc:95:89:70:db
+-----BEGIN CERTIFICATE-----
+MIIC2zCCAmCgAwIBAgIQXwJB13qHfEwDo6yWjfv/0DAKBggqhkjOPQQDAzBIMQsw
+CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS
+VVNUIEVWIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTEwMDAwMFoXDTM1MDIxMTA5
+NTk1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG
+A1UEAxMZRC1UUlVTVCBFViBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB
+BAAiA2IABPEL3YZDIBnfl4XoIkqbz52Yv7QFJsnL46bSj8WeeHsxiamJrSc8ZRCC
+/N/DnU7wMyPE0jL1HLDfMxddxfCxivnvubcUyilKwg+pf3VlSSowZ/Rk99Yad9rD
+wpdhQntJraOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFH8QARY3
+OqQo5FD4pPfsazK2/umLMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g
+PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2V2X3Jvb3Rf
+Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l
+dC9DTj1ELVRSVVNUJTIwRVYlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1
+c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO
+PQQDAwNpADBmAjEAyjzGKnXCXnViOTYAYFqLwZOZzNnbQTs7h5kXO9XMT8oi96CA
+y/m0sRtW9XLS/BnRAjEAkfcwkz8QRitxpNA7RJvAKQIFskF3UfN5Wp6OFKBOQtJb
+gfM0agPnIjhQW+0ZT0MW
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc.
+# Subject: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc.
+# Label: "DigiCert TLS ECC P384 Root G5"
+# Serial: 13129116028163249804115411775095713523
+# MD5 Fingerprint: d3:71:04:6a:43:1c:db:a6:59:e1:a8:a3:aa:c5:71:ed
+# SHA1 Fingerprint: 17:f3:de:5e:9f:0f:19:e9:8e:f6:1f:32:26:6e:20:c4:07:ae:30:ee
+# SHA256 Fingerprint: 01:8e:13:f0:77:25:32:cf:80:9b:d1:b1:72:81:86:72:83:fc:48:c6:e1:3b:e9:c6:98:12:85:4a:49:0c:1b:05
+-----BEGIN CERTIFICATE-----
+MIICGTCCAZ+gAwIBAgIQCeCTZaz32ci5PhwLBCou8zAKBggqhkjOPQQDAzBOMQsw
+CQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJjAkBgNVBAMTHURp
+Z2lDZXJ0IFRMUyBFQ0MgUDM4NCBSb290IEc1MB4XDTIxMDExNTAwMDAwMFoXDTQ2
+MDExNDIzNTk1OVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkRpZ2lDZXJ0LCBJ
+bmMuMSYwJAYDVQQDEx1EaWdpQ2VydCBUTFMgRUNDIFAzODQgUm9vdCBHNTB2MBAG
+ByqGSM49AgEGBSuBBAAiA2IABMFEoc8Rl1Ca3iOCNQfN0MsYndLxf3c1TzvdlHJS
+7cI7+Oz6e2tYIOyZrsn8aLN1udsJ7MgT9U7GCh1mMEy7H0cKPGEQQil8pQgO4CLp
+0zVozptjn4S1mU1YoI71VOeVyaNCMEAwHQYDVR0OBBYEFMFRRVBZqz7nLFr6ICIS
+B4CIfBFqMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MAoGCCqGSM49
+BAMDA2gAMGUCMQCJao1H5+z8blUD2WdsJk6Dxv3J+ysTvLd6jLRl0mlpYxNjOyZQ
+LgGheQaRnUi/wr4CMEfDFXuxoJGZSZOoPHzoRgaLLPIxAJSdYsiJvRmEFOml+wG4
+DXZDjC5Ty3zfDBeWUA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc.
+# Subject: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc.
+# Label: "DigiCert TLS RSA4096 Root G5"
+# Serial: 11930366277458970227240571539258396554
+# MD5 Fingerprint: ac:fe:f7:34:96:a9:f2:b3:b4:12:4b:e4:27:41:6f:e1
+# SHA1 Fingerprint: a7:88:49:dc:5d:7c:75:8c:8c:de:39:98:56:b3:aa:d0:b2:a5:71:35
+# SHA256 Fingerprint: 37:1a:00:dc:05:33:b3:72:1a:7e:eb:40:e8:41:9e:70:79:9d:2b:0a:0f:2c:1d:80:69:31:65:f7:ce:c4:ad:75
+-----BEGIN CERTIFICATE-----
+MIIFZjCCA06gAwIBAgIQCPm0eKj6ftpqMzeJ3nzPijANBgkqhkiG9w0BAQwFADBN
+MQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJTAjBgNVBAMT
+HERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwHhcNMjEwMTE1MDAwMDAwWhcN
+NDYwMTE0MjM1OTU5WjBNMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQs
+IEluYy4xJTAjBgNVBAMTHERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCz0PTJeRGd/fxmgefM1eS87IE+
+ajWOLrfn3q/5B03PMJ3qCQuZvWxX2hhKuHisOjmopkisLnLlvevxGs3npAOpPxG0
+2C+JFvuUAT27L/gTBaF4HI4o4EXgg/RZG5Wzrn4DReW+wkL+7vI8toUTmDKdFqgp
+wgscONyfMXdcvyej/Cestyu9dJsXLfKB2l2w4SMXPohKEiPQ6s+d3gMXsUJKoBZM
+pG2T6T867jp8nVid9E6P/DsjyG244gXazOvswzH016cpVIDPRFtMbzCe88zdH5RD
+nU1/cHAN1DrRN/BsnZvAFJNY781BOHW8EwOVfH/jXOnVDdXifBBiqmvwPXbzP6Po
+sMH976pXTayGpxi0KcEsDr9kvimM2AItzVwv8n/vFfQMFawKsPHTDU9qTXeXAaDx
+Zre3zu/O7Oyldcqs4+Fj97ihBMi8ez9dLRYiVu1ISf6nL3kwJZu6ay0/nTvEF+cd
+Lvvyz6b84xQslpghjLSR6Rlgg/IwKwZzUNWYOwbpx4oMYIwo+FKbbuH2TbsGJJvX
+KyY//SovcfXWJL5/MZ4PbeiPT02jP/816t9JXkGPhvnxd3lLG7SjXi/7RgLQZhNe
+XoVPzthwiHvOAbWWl9fNff2C+MIkwcoBOU+NosEUQB+cZtUMCUbW8tDRSHZWOkPL
+tgoRObqME2wGtZ7P6wIDAQABo0IwQDAdBgNVHQ4EFgQUUTMc7TZArxfTJc1paPKv
+TiM+s0EwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcN
+AQEMBQADggIBAGCmr1tfV9qJ20tQqcQjNSH/0GEwhJG3PxDPJY7Jv0Y02cEhJhxw
+GXIeo8mH/qlDZJY6yFMECrZBu8RHANmfGBg7sg7zNOok992vIGCukihfNudd5N7H
+PNtQOa27PShNlnx2xlv0wdsUpasZYgcYQF+Xkdycx6u1UQ3maVNVzDl92sURVXLF
+O4uJ+DQtpBflF+aZfTCIITfNMBc9uPK8qHWgQ9w+iUuQrm0D4ByjoJYJu32jtyoQ
+REtGBzRj7TG5BO6jm5qu5jF49OokYTurWGT/u4cnYiWB39yhL/btp/96j1EuMPik
+AdKFOV8BmZZvWltwGUb+hmA+rYAQCd05JS9Yf7vSdPD3Rh9GOUrYU9DzLjtxpdRv
+/PNn5AeP3SYZ4Y1b+qOTEZvpyDrDVWiakuFSdjjo4bq9+0/V77PnSIMx8IIh47a+
+p6tv75/fTM8BuGJqIz3nCU2AG3swpMPdB380vqQmsvZB6Akd4yCYqjdP//fx4ilw
+MUc/dNAUFvohigLVigmUdy7yWSiLfFCSCmZ4OIN1xLVaqBHG5cGdZlXPU8Sv13WF
+qUITVuwhd4GTWgzqltlJyqEI8pc7bZsEGCREjnwB8twl2F6GmrE52/WRMmrRpnCK
+ovfepEWFJqgejF0pW8hL2JpqA15w8oVPbEtoL8pU9ozaMv7Da4M/OMZ+
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certainly Root R1 O=Certainly
+# Subject: CN=Certainly Root R1 O=Certainly
+# Label: "Certainly Root R1"
+# Serial: 188833316161142517227353805653483829216
+# MD5 Fingerprint: 07:70:d4:3e:82:87:a0:fa:33:36:13:f4:fa:33:e7:12
+# SHA1 Fingerprint: a0:50:ee:0f:28:71:f4:27:b2:12:6d:6f:50:96:25:ba:cc:86:42:af
+# SHA256 Fingerprint: 77:b8:2c:d8:64:4c:43:05:f7:ac:c5:cb:15:6b:45:67:50:04:03:3d:51:c6:0c:62:02:a8:e0:c3:34:67:d3:a0
+-----BEGIN CERTIFICATE-----
+MIIFRzCCAy+gAwIBAgIRAI4P+UuQcWhlM1T01EQ5t+AwDQYJKoZIhvcNAQELBQAw
+PTELMAkGA1UEBhMCVVMxEjAQBgNVBAoTCUNlcnRhaW5seTEaMBgGA1UEAxMRQ2Vy
+dGFpbmx5IFJvb3QgUjEwHhcNMjEwNDAxMDAwMDAwWhcNNDYwNDAxMDAwMDAwWjA9
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0
+YWlubHkgUm9vdCBSMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANA2
+1B/q3avk0bbm+yLA3RMNansiExyXPGhjZjKcA7WNpIGD2ngwEc/csiu+kr+O5MQT
+vqRoTNoCaBZ0vrLdBORrKt03H2As2/X3oXyVtwxwhi7xOu9S98zTm/mLvg7fMbed
+aFySpvXl8wo0tf97ouSHocavFwDvA5HtqRxOcT3Si2yJ9HiG5mpJoM610rCrm/b0
+1C7jcvk2xusVtyWMOvwlDbMicyF0yEqWYZL1LwsYpfSt4u5BvQF5+paMjRcCMLT5
+r3gajLQ2EBAHBXDQ9DGQilHFhiZ5shGIXsXwClTNSaa/ApzSRKft43jvRl5tcdF5
+cBxGX1HpyTfcX35pe0HfNEXgO4T0oYoKNp43zGJS4YkNKPl6I7ENPT2a/Z2B7yyQ
+wHtETrtJ4A5KVpK8y7XdeReJkd5hiXSSqOMyhb5OhaRLWcsrxXiOcVTQAjeZjOVJ
+6uBUcqQRBi8LjMFbvrWhsFNunLhgkR9Za/kt9JQKl7XsxXYDVBtlUrpMklZRNaBA
+2CnbrlJ2Oy0wQJuK0EJWtLeIAaSHO1OWzaMWj/Nmqhexx2DgwUMFDO6bW2BvBlyH
+Wyf5QBGenDPBt+U1VwV/J84XIIwc/PH72jEpSe31C4SnT8H2TsIonPru4K8H+zMR
+eiFPCyEQtkA6qyI6BJyLm4SGcprSp6XEtHWRqSsjAgMBAAGjQjBAMA4GA1UdDwEB
+/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTgqj8ljZ9EXME66C6u
+d0yEPmcM9DANBgkqhkiG9w0BAQsFAAOCAgEAuVevuBLaV4OPaAszHQNTVfSVcOQr
+PbA56/qJYv331hgELyE03fFo8NWWWt7CgKPBjcZq91l3rhVkz1t5BXdm6ozTaw3d
+8VkswTOlMIAVRQdFGjEitpIAq5lNOo93r6kiyi9jyhXWx8bwPWz8HA2YEGGeEaIi
+1wrykXprOQ4vMMM2SZ/g6Q8CRFA3lFV96p/2O7qUpUzpvD5RtOjKkjZUbVwlKNrd
+rRT90+7iIgXr0PK3aBLXWopBGsaSpVo7Y0VPv+E6dyIvXL9G+VoDhRNCX8reU9di
+taY1BMJH/5n9hN9czulegChB8n3nHpDYT3Y+gjwN/KUD+nsa2UUeYNrEjvn8K8l7
+lcUq/6qJ34IxD3L/DCfXCh5WAFAeDJDBlrXYFIW7pw0WwfgHJBu6haEaBQmAupVj
+yTrsJZ9/nbqkRxWbRHDxakvWOF5D8xh+UG7pWijmZeZ3Gzr9Hb4DJqPb1OG7fpYn
+Kx3upPvaJVQTA945xsMfTZDsjxtK0hzthZU4UHlG1sGQUDGpXJpuHfUzVounmdLy
+yCwzk5Iwx06MZTMQZBf9JBeW0Y3COmor6xOLRPIh80oat3df1+2IpHLlOR+Vnb5n
+wXARPbv0+Em34yaXOp/SX3z7wJl8OSngex2/DaeP0ik0biQVy96QXr8axGbqwua6
+OV+KmalBWQewLK8=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certainly Root E1 O=Certainly
+# Subject: CN=Certainly Root E1 O=Certainly
+# Label: "Certainly Root E1"
+# Serial: 8168531406727139161245376702891150584
+# MD5 Fingerprint: 0a:9e:ca:cd:3e:52:50:c6:36:f3:4b:a3:ed:a7:53:e9
+# SHA1 Fingerprint: f9:e1:6d:dc:01:89:cf:d5:82:45:63:3e:c5:37:7d:c2:eb:93:6f:2b
+# SHA256 Fingerprint: b4:58:5f:22:e4:ac:75:6a:4e:86:12:a1:36:1c:5d:9d:03:1a:93:fd:84:fe:bb:77:8f:a3:06:8b:0f:c4:2d:c2
+-----BEGIN CERTIFICATE-----
+MIIB9zCCAX2gAwIBAgIQBiUzsUcDMydc+Y2aub/M+DAKBggqhkjOPQQDAzA9MQsw
+CQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0YWlu
+bHkgUm9vdCBFMTAeFw0yMTA0MDEwMDAwMDBaFw00NjA0MDEwMDAwMDBaMD0xCzAJ
+BgNVBAYTAlVTMRIwEAYDVQQKEwlDZXJ0YWlubHkxGjAYBgNVBAMTEUNlcnRhaW5s
+eSBSb290IEUxMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE3m/4fxzf7flHh4axpMCK
++IKXgOqPyEpeKn2IaKcBYhSRJHpcnqMXfYqGITQYUBsQ3tA3SybHGWCA6TS9YBk2
+QNYphwk8kXr2vBMj3VlOBF7PyAIcGFPBMdjaIOlEjeR2o0IwQDAOBgNVHQ8BAf8E
+BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU8ygYy2R17ikq6+2uI1g4
+hevIIgcwCgYIKoZIzj0EAwMDaAAwZQIxALGOWiDDshliTd6wT99u0nCK8Z9+aozm
+ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG
+BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR
+-----END CERTIFICATE-----
+
+# Issuer: CN=E-Tugra Global Root CA RSA v3 O=E-Tugra EBG A.S. OU=E-Tugra Trust Center
+# Subject: CN=E-Tugra Global Root CA RSA v3 O=E-Tugra EBG A.S. OU=E-Tugra Trust Center
+# Label: "E-Tugra Global Root CA RSA v3"
+# Serial: 75951268308633135324246244059508261641472512052
+# MD5 Fingerprint: 22:be:10:f6:c2:f8:03:88:73:5f:33:29:47:28:47:a4
+# SHA1 Fingerprint: e9:a8:5d:22:14:52:1c:5b:aa:0a:b4:be:24:6a:23:8a:c9:ba:e2:a9
+# SHA256 Fingerprint: ef:66:b0:b1:0a:3c:db:9f:2e:36:48:c7:6b:d2:af:18:ea:d2:bf:e6:f1:17:65:5e:28:c4:06:0d:a1:a3:f4:c2
+-----BEGIN CERTIFICATE-----
+MIIF8zCCA9ugAwIBAgIUDU3FzRYilZYIfrgLfxUGNPt5EDQwDQYJKoZIhvcNAQEL
+BQAwgYAxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHEwZBbmthcmExGTAXBgNVBAoTEEUt
+VHVncmEgRUJHIEEuUy4xHTAbBgNVBAsTFEUtVHVncmEgVHJ1c3QgQ2VudGVyMSYw
+JAYDVQQDEx1FLVR1Z3JhIEdsb2JhbCBSb290IENBIFJTQSB2MzAeFw0yMDAzMTgw
+OTA3MTdaFw00NTAzMTIwOTA3MTdaMIGAMQswCQYDVQQGEwJUUjEPMA0GA1UEBxMG
+QW5rYXJhMRkwFwYDVQQKExBFLVR1Z3JhIEVCRyBBLlMuMR0wGwYDVQQLExRFLVR1
+Z3JhIFRydXN0IENlbnRlcjEmMCQGA1UEAxMdRS1UdWdyYSBHbG9iYWwgUm9vdCBD
+QSBSU0EgdjMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCiZvCJt3J7
+7gnJY9LTQ91ew6aEOErxjYG7FL1H6EAX8z3DeEVypi6Q3po61CBxyryfHUuXCscx
+uj7X/iWpKo429NEvx7epXTPcMHD4QGxLsqYxYdE0PD0xesevxKenhOGXpOhL9hd8
+7jwH7eKKV9y2+/hDJVDqJ4GohryPUkqWOmAalrv9c/SF/YP9f4RtNGx/ardLAQO/
+rWm31zLZ9Vdq6YaCPqVmMbMWPcLzJmAy01IesGykNz709a/r4d+ABs8qQedmCeFL
+l+d3vSFtKbZnwy1+7dZ5ZdHPOrbRsV5WYVB6Ws5OUDGAA5hH5+QYfERaxqSzO8bG
+wzrwbMOLyKSRBfP12baqBqG3q+Sx6iEUXIOk/P+2UNOMEiaZdnDpwA+mdPy70Bt4
+znKS4iicvObpCdg604nmvi533wEKb5b25Y08TVJ2Glbhc34XrD2tbKNSEhhw5oBO
+M/J+JjKsBY04pOZ2PJ8QaQ5tndLBeSBrW88zjdGUdjXnXVXHt6woq0bM5zshtQoK
+5EpZ3IE1S0SVEgpnpaH/WwAH0sDM+T/8nzPyAPiMbIedBi3x7+PmBvrFZhNb/FAH
+nnGGstpvdDDPk1Po3CLW3iAfYY2jLqN4MpBs3KwytQXk9TwzDdbgh3cXTJ2w2Amo
+DVf3RIXwyAS+XF1a4xeOVGNpf0l0ZAWMowIDAQABo2MwYTAPBgNVHRMBAf8EBTAD
+AQH/MB8GA1UdIwQYMBaAFLK0ruYt9ybVqnUtdkvAG1Mh0EjvMB0GA1UdDgQWBBSy
+tK7mLfcm1ap1LXZLwBtTIdBI7zAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEL
+BQADggIBAImocn+M684uGMQQgC0QDP/7FM0E4BQ8Tpr7nym/Ip5XuYJzEmMmtcyQ
+6dIqKe6cLcwsmb5FJ+Sxce3kOJUxQfJ9emN438o2Fi+CiJ+8EUdPdk3ILY7r3y18
+Tjvarvbj2l0Upq7ohUSdBm6O++96SmotKygY/r+QLHUWnw/qln0F7psTpURs+APQ
+3SPh/QMSEgj0GDSz4DcLdxEBSL9htLX4GdnLTeqjjO/98Aa1bZL0SmFQhO3sSdPk
+vmjmLuMxC1QLGpLWgti2omU8ZgT5Vdps+9u1FGZNlIM7zR6mK7L+d0CGq+ffCsn9
+9t2HVhjYsCxVYJb6CH5SkPVLpi6HfMsg2wY+oF0Dd32iPBMbKaITVaA9FCKvb7jQ
+mhty3QUBjYZgv6Rn7rWlDdF/5horYmbDB7rnoEgcOMPpRfunf/ztAmgayncSd6YA
+VSgU7NbHEqIbZULpkejLPoeJVF3Zr52XnGnnCv8PWniLYypMfUeUP95L6VPQMPHF
+9p5J3zugkaOj/s1YzOrfr28oO6Bpm4/srK4rVJ2bBLFHIK+WEj5jlB0E5y67hscM
+moi/dkfv97ALl2bSRM9gUgfh1SxKOidhd8rXj+eHDjD/DLsE4mHDosiXYY60MGo8
+bcIHX0pzLz/5FooBZu+6kcpSV3uu1OYP3Qt6f4ueJiDPO++BcYNZ
+-----END CERTIFICATE-----
+
+# Issuer: CN=E-Tugra Global Root CA ECC v3 O=E-Tugra EBG A.S. OU=E-Tugra Trust Center
+# Subject: CN=E-Tugra Global Root CA ECC v3 O=E-Tugra EBG A.S. OU=E-Tugra Trust Center
+# Label: "E-Tugra Global Root CA ECC v3"
+# Serial: 218504919822255052842371958738296604628416471745
+# MD5 Fingerprint: 46:bc:81:bb:f1:b5:1e:f7:4b:96:bc:14:e2:e7:27:64
+# SHA1 Fingerprint: 8a:2f:af:57:53:b1:b0:e6:a1:04:ec:5b:6a:69:71:6d:f6:1c:e2:84
+# SHA256 Fingerprint: 87:3f:46:85:fa:7f:56:36:25:25:2e:6d:36:bc:d7:f1:6f:c2:49:51:f2:64:e4:7e:1b:95:4f:49:08:cd:ca:13
+-----BEGIN CERTIFICATE-----
+MIICpTCCAiqgAwIBAgIUJkYZdzHhT28oNt45UYbm1JeIIsEwCgYIKoZIzj0EAwMw
+gYAxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHEwZBbmthcmExGTAXBgNVBAoTEEUtVHVn
+cmEgRUJHIEEuUy4xHTAbBgNVBAsTFEUtVHVncmEgVHJ1c3QgQ2VudGVyMSYwJAYD
+VQQDEx1FLVR1Z3JhIEdsb2JhbCBSb290IENBIEVDQyB2MzAeFw0yMDAzMTgwOTQ2
+NThaFw00NTAzMTIwOTQ2NThaMIGAMQswCQYDVQQGEwJUUjEPMA0GA1UEBxMGQW5r
+YXJhMRkwFwYDVQQKExBFLVR1Z3JhIEVCRyBBLlMuMR0wGwYDVQQLExRFLVR1Z3Jh
+IFRydXN0IENlbnRlcjEmMCQGA1UEAxMdRS1UdWdyYSBHbG9iYWwgUm9vdCBDQSBF
+Q0MgdjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASOmCm/xxAeJ9urA8woLNheSBkQ
+KczLWYHMjLiSF4mDKpL2w6QdTGLVn9agRtwcvHbB40fQWxPa56WzZkjnIZpKT4YK
+fWzqTTKACrJ6CZtpS5iB4i7sAnCWH/31Rs7K3IKjYzBhMA8GA1UdEwEB/wQFMAMB
+Af8wHwYDVR0jBBgwFoAU/4Ixcj75xGZsrTie0bBRiKWQzPUwHQYDVR0OBBYEFP+C
+MXI++cRmbK04ntGwUYilkMz1MA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNp
+ADBmAjEA5gVYaWHlLcoNy/EZCL3W/VGSGn5jVASQkZo1kTmZ+gepZpO6yGjUij/6
+7W4WAie3AjEA3VoXK3YdZUKWpqxdinlW2Iob35reX8dQj7FbcQwm32pAAOwzkSFx
+vmjkI6TZraE3
+-----END CERTIFICATE-----
+
+# Issuer: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD.
+# Subject: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD.
+# Label: "Security Communication RootCA3"
+# Serial: 16247922307909811815
+# MD5 Fingerprint: 1c:9a:16:ff:9e:5c:e0:4d:8a:14:01:f4:35:5d:29:26
+# SHA1 Fingerprint: c3:03:c8:22:74:92:e5:61:a2:9c:5f:79:91:2b:1e:44:13:91:30:3a
+# SHA256 Fingerprint: 24:a5:5c:2a:b0:51:44:2d:06:17:76:65:41:23:9a:4a:d0:32:d7:c5:51:75:aa:34:ff:de:2f:bc:4f:5c:52:94
+-----BEGIN CERTIFICATE-----
+MIIFfzCCA2egAwIBAgIJAOF8N0D9G/5nMA0GCSqGSIb3DQEBDAUAMF0xCzAJBgNV
+BAYTAkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMScw
+JQYDVQQDEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTMwHhcNMTYwNjE2
+MDYxNzE2WhcNMzgwMTE4MDYxNzE2WjBdMQswCQYDVQQGEwJKUDElMCMGA1UEChMc
+U0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UEAxMeU2VjdXJpdHkg
+Q29tbXVuaWNhdGlvbiBSb290Q0EzMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC
+CgKCAgEA48lySfcw3gl8qUCBWNO0Ot26YQ+TUG5pPDXC7ltzkBtnTCHsXzW7OT4r
+CmDvu20rhvtxosis5FaU+cmvsXLUIKx00rgVrVH+hXShuRD+BYD5UpOzQD11EKzA
+lrenfna84xtSGc4RHwsENPXY9Wk8d/Nk9A2qhd7gCVAEF5aEt8iKvE1y/By7z/MG
+TfmfZPd+pmaGNXHIEYBMwXFAWB6+oHP2/D5Q4eAvJj1+XCO1eXDe+uDRpdYMQXF7
+9+qMHIjH7Iv10S9VlkZ8WjtYO/u62C21Jdp6Ts9EriGmnpjKIG58u4iFW/vAEGK7
+8vknR+/RiTlDxN/e4UG/VHMgly1s2vPUB6PmudhvrvyMGS7TZ2crldtYXLVqAvO4
+g160a75BflcJdURQVc1aEWEhCmHCqYj9E7wtiS/NYeCVvsq1e+F7NGcLH7YMx3we
+GVPKp7FKFSBWFHA9K4IsD50VHUeAR/94mQ4xr28+j+2GaR57GIgUssL8gjMunEst
++3A7caoreyYn8xrC3PsXuKHqy6C0rtOUfnrQq8PsOC0RLoi/1D+tEjtCrI8Cbn3M
+0V9hvqG8OmpI6iZVIhZdXw3/JzOfGAN0iltSIEdrRU0id4xVJ/CvHozJgyJUt5rQ
+T9nO/NkuHJYosQLTA70lUhw0Zk8jq/R3gpYd0VcwCBEF/VfR2ccCAwEAAaNCMEAw
+HQYDVR0OBBYEFGQUfPxYchamCik0FW8qy7z8r6irMA4GA1UdDwEB/wQEAwIBBjAP
+BgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBDAUAA4ICAQDcAiMI4u8hOscNtybS
+YpOnpSNyByCCYN8Y11StaSWSntkUz5m5UoHPrmyKO1o5yGwBQ8IibQLwYs1OY0PA
+FNr0Y/Dq9HHuTofjcan0yVflLl8cebsjqodEV+m9NU1Bu0soo5iyG9kLFwfl9+qd
+9XbXv8S2gVj/yP9kaWJ5rW4OH3/uHWnlt3Jxs/6lATWUVCvAUm2PVcTJ0rjLyjQI
+UYWg9by0F1jqClx6vWPGOi//lkkZhOpn2ASxYfQAW0q3nHE3GYV5v4GwxxMOdnE+
+OoAGrgYWp421wsTL/0ClXI2lyTrtcoHKXJg80jQDdwj98ClZXSEIx2C/pHF7uNke
+gr4Jr2VvKKu/S7XuPghHJ6APbw+LP6yVGPO5DtxnVW5inkYO0QR4ynKudtml+LLf
+iAlhi+8kTtFZP1rUPcmTPCtk9YENFpb3ksP+MW/oKjJ0DvRMmEoYDjBU1cXrvMUV
+nuiZIesnKwkK2/HmcBhWuwzkvvnoEKQTkrgc4NtnHVMDpCKn3F2SEDzq//wbEBrD
+2NCcnWXL0CsnMQMeNuE9dnUM/0Umud1RvCPHX9jYhxBAEg09ODfnRDwYwFMJZI//
+1ZqmfHAuc1Uh6N//g7kdPjIe1qZ9LPFm6Vwdp6POXiUyK+OVrCoHzrQoeIY8Laad
+TdJ0MN1kURXbg4NR16/9M51NZg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD.
+# Subject: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD.
+# Label: "Security Communication ECC RootCA1"
+# Serial: 15446673492073852651
+# MD5 Fingerprint: 7e:43:b0:92:68:ec:05:43:4c:98:ab:5d:35:2e:7e:86
+# SHA1 Fingerprint: b8:0e:26:a9:bf:d2:b2:3b:c0:ef:46:c9:ba:c7:bb:f6:1d:0d:41:41
+# SHA256 Fingerprint: e7:4f:bd:a5:5b:d5:64:c4:73:a3:6b:44:1a:a7:99:c8:a6:8e:07:74:40:e8:28:8b:9f:a1:e5:0e:4b:ba:ca:11
+-----BEGIN CERTIFICATE-----
+MIICODCCAb6gAwIBAgIJANZdm7N4gS7rMAoGCCqGSM49BAMDMGExCzAJBgNVBAYT
+AkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMSswKQYD
+VQQDEyJTZWN1cml0eSBDb21tdW5pY2F0aW9uIEVDQyBSb290Q0ExMB4XDTE2MDYx
+NjA1MTUyOFoXDTM4MDExODA1MTUyOFowYTELMAkGA1UEBhMCSlAxJTAjBgNVBAoT
+HFNFQ09NIFRydXN0IFN5c3RlbXMgQ08uLExURC4xKzApBgNVBAMTIlNlY3VyaXR5
+IENvbW11bmljYXRpb24gRUNDIFJvb3RDQTEwdjAQBgcqhkjOPQIBBgUrgQQAIgNi
+AASkpW9gAwPDvTH00xecK4R1rOX9PVdu12O/5gSJko6BnOPpR27KkBLIE+Cnnfdl
+dB9sELLo5OnvbYUymUSxXv3MdhDYW72ixvnWQuRXdtyQwjWpS4g8EkdtXP9JTxpK
+ULGjQjBAMB0GA1UdDgQWBBSGHOf+LaVKiwj+KBH6vqNm+GBZLzAOBgNVHQ8BAf8E
+BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjAVXUI9/Lbu
+9zuxNuie9sRGKEkz0FhDKmMpzE2xtHqiuQ04pV1IKv3LsnNdo4gIxwwCMQDAqy0O
+be0YottT6SXbVQjgUMzfRGEWgqtJsLKB7HOHeLRMsmIbEvoWTSVLY70eN9k=
+-----END CERTIFICATE-----
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/certifi/core.py b/venv/lib/python3.9/site-packages/pip/_vendor/certifi/core.py
new file mode 100644
index 0000000..c3e5466
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/certifi/core.py
@@ -0,0 +1,108 @@
+"""
+certifi.py
+~~~~~~~~~~
+
+This module returns the installation location of cacert.pem or its contents.
+"""
+import sys
+
+
+if sys.version_info >= (3, 11):
+
+    from importlib.resources import as_file, files
+
+    _CACERT_CTX = None
+    _CACERT_PATH = None
+
+    def where() -> str:
+        # This is slightly terrible, but we want to delay extracting the file
+        # in cases where we're inside of a zipimport situation until someone
+        # actually calls where(), but we don't want to re-extract the file
+        # on every call of where(), so we'll do it once then store it in a
+        # global variable.
+        global _CACERT_CTX
+        global _CACERT_PATH
+        if _CACERT_PATH is None:
+            # This is slightly janky: the importlib.resources API wants you to
+            # manage the cleanup of this file, so it doesn't actually return a
+            # path. Instead, it returns a context manager that yields the path
+            # when you enter it and does any cleanup when you leave it. In
+            # the common case of not needing a temporary file, it simply
+            # returns the file system location and the __exit__() is a no-op.
+            #
+            # We also have to hold onto the actual context manager, because
+            # it will do the cleanup whenever it gets garbage collected, so
+            # we store it at the global level as well.
+            _CACERT_CTX = as_file(files("pip._vendor.certifi").joinpath("cacert.pem"))
+            _CACERT_PATH = str(_CACERT_CTX.__enter__())
+
+        return _CACERT_PATH
+
+    def contents() -> str:
+        return files("pip._vendor.certifi").joinpath("cacert.pem").read_text(encoding="ascii")
+
+elif sys.version_info >= (3, 7):
+
+    from importlib.resources import path as get_path, read_text
+
+    _CACERT_CTX = None
+    _CACERT_PATH = None
+
+    def where() -> str:
+        # This is slightly terrible, but we want to delay extracting the
+        # file in cases where we're inside of a zipimport situation until
+        # someone actually calls where(), but we don't want to re-extract
+        # the file on every call of where(), so we'll do it once then store
+        # it in a global variable.
+        global _CACERT_CTX
+        global _CACERT_PATH
+        if _CACERT_PATH is None:
+            # This is slightly janky: the importlib.resources API wants you
+            # to manage the cleanup of this file, so it doesn't actually
+            # return a path. Instead, it returns a context manager that
+            # yields the path when you enter it and does any cleanup when
+            # you leave it. In the common case of not needing a temporary
+            # file, it simply returns the file system location and the
+            # __exit__() is a no-op.
+            #
+            # We also have to hold onto the actual context manager, because
+            # it will do the cleanup whenever it gets garbage collected, so
+            # we store it at the global level as well.
+            _CACERT_CTX = get_path("pip._vendor.certifi", "cacert.pem")
+            _CACERT_PATH = str(_CACERT_CTX.__enter__())
+
+        return _CACERT_PATH
+
+    def contents() -> str:
+        return read_text("pip._vendor.certifi", "cacert.pem", encoding="ascii")
+
+else:
+    import os
+    import types
+    from typing import Union
+
+    Package = Union[types.ModuleType, str]
+    Resource = Union[str, "os.PathLike"]
+
+    # This fallback works for Python versions prior to 3.7 that lack the
+    # importlib.resources module, but it relies on the existing `where`
+    # function, so it won't address issues with environments like PyOxidizer
+    # that don't set __file__ on modules.
+    def read_text(
+        package: Package,
+        resource: Resource,
+        encoding: str = 'utf-8',
+        errors: str = 'strict'
+    ) -> str:
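+        # Note: `package`, `resource`, and `errors` are ignored here; this
+        # fallback always reads the bundled cacert.pem with the given encoding.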
+        with open(where(), encoding=encoding) as data:
+            return data.read()
+
+    # If we don't have importlib.resources, then we will just do the old logic
+    # of assuming we're on the filesystem and munge the path directly.
+    def where() -> str:
+        f = os.path.dirname(__file__)
+
+        return os.path.join(f, "cacert.pem")
+
+    def contents() -> str:
+        return read_text("pip._vendor.certifi", "cacert.pem", encoding="ascii")
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__init__.py b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__init__.py
new file mode 100644
index 0000000..e91ad61
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__init__.py
@@ -0,0 +1,93 @@
+######################## BEGIN LICENSE BLOCK ########################
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from .enums import InputState
+from .universaldetector import UniversalDetector
+from .version import VERSION, __version__
+
+__all__ = ["UniversalDetector", "detect", "detect_all", "__version__", "VERSION"]
+
+
+def detect(byte_str):
+    """
+    Detect the encoding of the given byte string.
+
+    :param byte_str:     The byte sequence to examine.
+    :type byte_str:      ``bytes`` or ``bytearray``
+    """
+    if not isinstance(byte_str, bytearray):
+        if not isinstance(byte_str, bytes):
+            raise TypeError(
+                f"Expected object of type bytes or bytearray, got: {type(byte_str)}"
+            )
+        byte_str = bytearray(byte_str)
+    detector = UniversalDetector()
+    detector.feed(byte_str)
+    return detector.close()
+
+
+def detect_all(byte_str, ignore_threshold=False):
+    """
+    Detect all the possible encodings of the given byte string.
+
+    :param byte_str:          The byte sequence to examine.
+    :type byte_str:           ``bytes`` or ``bytearray``
+    :param ignore_threshold:  Include encodings that are below
+                              ``UniversalDetector.MINIMUM_THRESHOLD``
+                              in results.
+    :type ignore_threshold:   ``bool``
+    """
+    if not isinstance(byte_str, bytearray):
+        if not isinstance(byte_str, bytes):
+            raise TypeError(
+                f"Expected object of type bytes or bytearray, got: {type(byte_str)}"
+            )
+        byte_str = bytearray(byte_str)
+
+    detector = UniversalDetector()
+    detector.feed(byte_str)
+    detector.close()
+
+    if detector.input_state == InputState.HIGH_BYTE:
+        results = []
+        probers = []
+        for prober in detector.charset_probers:
+            if hasattr(prober, "probers"):
+                probers.extend(p for p in prober.probers)
+            else:
+                probers.append(prober)
+        for prober in probers:
+            if ignore_threshold or prober.get_confidence() > detector.MINIMUM_THRESHOLD:
+                charset_name = prober.charset_name or ""
+                lower_charset_name = charset_name.lower()
+                # Use Windows encoding name instead of ISO-8859 if we saw any
+                # extra Windows-specific bytes
+                if lower_charset_name.startswith("iso-8859") and detector.has_win_bytes:
+                    charset_name = detector.ISO_WIN_MAP.get(
+                        lower_charset_name, charset_name
+                    )
+                results.append(
+                    {
+                        "encoding": charset_name,
+                        "confidence": prober.get_confidence(),
+                        "language": prober.language,
+                    }
+                )
+        if len(results) > 0:
+            return sorted(results, key=lambda result: -result["confidence"])
+
+    return [detector.result]
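
detect() above returns a single best guess as a dict with encoding,
confidence, and language keys, while detect_all() returns every candidate
whose prober clears UniversalDetector.MINIMUM_THRESHOLD (or all candidates
with ignore_threshold=True), sorted by descending confidence. A short usage
sketch against this vendored copy:

    from pip._vendor import chardet

    # Bytes with Latin-1 accents; detect() should report an ISO-8859/Windows
    # family encoding, though confidence on such short input is naturally low.
    raw = "Déjà vu, naïveté".encode("iso-8859-1")

    best = chardet.detect(raw)
    print(best["encoding"], best["confidence"], best["language"])

    # detect_all() keeps every surviving candidate, highest confidence first.
    for candidate in chardet.detect_all(raw, ignore_threshold=True):
        print(candidate)
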
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..242005f
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-39.pyc
new file mode 100644
index 0000000..0521d8a
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-39.pyc
new file mode 100644
index 0000000..3e4744c
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-39.pyc
new file mode 100644
index 0000000..af91f94
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-39.pyc
new file mode 100644
index 0000000..add74ed
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-39.pyc
new file mode 100644
index 0000000..1afaf91
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-39.pyc
new file mode 100644
index 0000000..5b2a70f
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-39.pyc
new file mode 100644
index 0000000..8483ef0
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-39.pyc
new file mode 100644
index 0000000..e053691
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-39.pyc
new file mode 100644
index 0000000..2f43d49
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-39.pyc
new file mode 100644
index 0000000..e5931ac
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-39.pyc
new file mode 100644
index 0000000..6fc4796
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-39.pyc
new file mode 100644
index 0000000..83a1f7c
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-39.pyc
new file mode 100644
index 0000000..eaaf907
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-39.pyc
new file mode 100644
index 0000000..39d52b8
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-39.pyc
new file mode 100644
index 0000000..8558746
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-39.pyc
new file mode 100644
index 0000000..187752c
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-39.pyc
new file mode 100644
index 0000000..7f5f132
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-39.pyc
new file mode 100644
index 0000000..6e41cdd
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/jisfreq.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/jisfreq.cpython-39.pyc
new file mode 100644
index 0000000..3a6a12f
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/jisfreq.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/johabfreq.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/johabfreq.cpython-39.pyc
new file mode 100644
index 0000000..e9b6177
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/johabfreq.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/johabprober.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/johabprober.cpython-39.pyc
new file mode 100644
index 0000000..1498087
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/johabprober.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-39.pyc
new file mode 100644
index 0000000..3e7ba1c
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-39.pyc
new file mode 100644
index 0000000..998dc34
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-39.pyc
new file mode 100644
index 0000000..90c5245
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-39.pyc
new file mode 100644
index 0000000..3027ceb
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-39.pyc
new file mode 100644
index 0000000..deac718
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langrussianmodel.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langrussianmodel.cpython-39.pyc
new file mode 100644
index 0000000..49f2b79
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langrussianmodel.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-39.pyc
new file mode 100644
index 0000000..38d8213
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-39.pyc
new file mode 100644
index 0000000..39c6206
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/latin1prober.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/latin1prober.cpython-39.pyc
new file mode 100644
index 0000000..5183eb3
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/latin1prober.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-39.pyc
new file mode 100644
index 0000000..a4dd93b
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-39.pyc
new file mode 100644
index 0000000..f7d7de8
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/mbcssm.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/mbcssm.cpython-39.pyc
new file mode 100644
index 0000000..be8ae0b
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/mbcssm.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-39.pyc
new file mode 100644
index 0000000..bbeb68e
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-39.pyc
new file mode 100644
index 0000000..b34f8ff
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/sjisprober.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/sjisprober.cpython-39.pyc
new file mode 100644
index 0000000..9f14de1
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/sjisprober.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/universaldetector.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/universaldetector.cpython-39.pyc
new file mode 100644
index 0000000..ab0ba5d
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/universaldetector.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/utf1632prober.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/utf1632prober.cpython-39.pyc
new file mode 100644
index 0000000..6c7e113
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/utf1632prober.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/utf8prober.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/utf8prober.cpython-39.pyc
new file mode 100644
index 0000000..d871769
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/utf8prober.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/version.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/version.cpython-39.pyc
new file mode 100644
index 0000000..7e82bdb
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/__pycache__/version.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/big5freq.py b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/big5freq.py
new file mode 100644
index 0000000..87d9f97
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/big5freq.py
@@ -0,0 +1,386 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+# Big5 frequency table
+# by Taiwan's Mandarin Promotion Council
+# <http://www.edu.tw:81/mandr/>
+#
+# 128  --> 0.42261
+# 256  --> 0.57851
+# 512  --> 0.74851
+# 1024 --> 0.89384
+# 2048 --> 0.97583
+#
+# Ideal Distribution Ratio = 0.74851/(1-0.74851) = 2.98
+# Random Distribution Ratio = 512/(5401-512) = 0.105
+#
+# The typical distribution ratio is about 25% of the ideal one, still much higher than the RDR
+
+BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75
+
+# Char to FreqOrder table
+BIG5_TABLE_SIZE = 5376
+# fmt: off
+BIG5_CHAR_TO_FREQ_ORDER = (
+   1,1801,1506, 255,1431, 198,   9,  82,   6,5008, 177, 202,3681,1256,2821, 110, #   16
+3814,  33,3274, 261,  76,  44,2114,  16,2946,2187,1176, 659,3971,  26,3451,2653, #   32
+1198,3972,3350,4202, 410,2215, 302, 590, 361,1964,   8, 204,  58,4510,5009,1932, #   48
+  63,5010,5011, 317,1614,  75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, #   64
+3682,   3,  10,3973,1471,  29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, #   80
+4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947,  34,3556,3204,  64, 604, #   96
+5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337,  72, 406,5017,  80, #  112
+ 630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449,  69,2987, 591, #  128
+ 179,2096, 471, 115,2035,1844,  60,  50,2988, 134, 806,1869, 734,2036,3454, 180, #  144
+ 995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, #  160
+2502,  90,2716,1338, 663,  11, 906,1099,2553,  20,2441, 182, 532,1716,5019, 732, #  176
+1376,4204,1311,1420,3206,  25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, #  192
+3276, 475,1447,3683,5020, 117,  21, 656, 810,1297,2300,2334,3557,5021, 126,4205, #  208
+ 706, 456, 150, 613,4513,  71,1118,2037,4206, 145,3092,  85, 835, 486,2115,1246, #  224
+1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, #  240
+3558,3135,5023,1956,1153,4207,  83, 296,1199,3093, 192, 624,  93,5024, 822,1898, #  256
+2823,3136, 795,2065, 991,1554,1542,1592,  27,  43,2867, 859, 139,1456, 860,4514, #  272
+ 437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, #  288
+3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, #  304
+1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, #  320
+5026,5027,2176,3207,3685,2682, 593, 845,1062,3277,  88,1723,2038,3978,1951, 212, #  336
+ 266, 152, 149, 468,1899,4208,4516,  77, 187,5028,3038,  37,   5,2990,5029,3979, #  352
+5030,5031,  39,2524,4517,2908,3208,2079,  55, 148,  74,4518, 545, 483,1474,1029, #  368
+1665, 217,1870,1531,3138,1104,2655,4209,  24, 172,3562, 900,3980,3563,3564,4519, #  384
+  32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683,   4,3039,3351,1427,1789, #  400
+ 188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, #  416
+3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439,  38,5037,1063,5038, 794, #  432
+3982,1435,2301,  46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804,  35, 707, #  448
+ 324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, #  464
+2129,1363,3689,1423, 697, 100,3094,  48,  70,1231, 495,3139,2196,5043,1294,5044, #  480
+2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, #  496
+ 314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, #  512
+ 287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, #  528
+3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, #  544
+1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, #  560
+1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, #  576
+1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381,   7, #  592
+2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, #  608
+ 265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, #  624
+4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, #  640
+1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, #  656
+5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, #  672
+2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, #  688
+ 383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, #  704
+  98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, #  720
+ 523,2789,2790,2658,5061, 141,2235,1333,  68, 176, 441, 876, 907,4220, 603,2602, #  736
+ 710, 171,3464, 404, 549,  18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, #  752
+5063,2991, 368,5064, 146, 366,  99, 871,3693,1543, 748, 807,1586,1185,  22,2263, #  768
+ 379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, #  784
+1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068,  59,5069, #  800
+ 585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, #  816
+ 690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, #  832
+5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, #  848
+1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, #  864
+ 544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, #  880
+3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, #  896
+4224,  57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, #  912
+3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, #  928
+ 279,3145,  51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, #  944
+ 610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, #  960
+1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, #  976
+4227,2475,1436, 953,4228,2055,4545, 671,2400,  79,4229,2446,3285, 608, 567,2689, #  992
+3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008
+3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024
+2402,5097,5098,5099,4232,3045,   0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040
+5101, 233,4233,3697,1819,4550,4551,5102,  96,1777,1315,2083,5103, 257,5104,1810, # 1056
+3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072
+5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088
+1484,5110,1712, 127,  67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104
+2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120
+1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136
+  78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152
+1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168
+4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184
+3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200
+ 534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216
+ 165, 243,4559,3703,2528, 123, 683,4239, 764,4560,  36,3998,1793, 589,2916, 816, # 1232
+ 626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248
+2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264
+5122, 611,1156, 854,2386,1316,2875,   2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280
+1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296
+2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312
+1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328
+1994,5135,4564,5136,5137,2198,  13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344
+5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360
+5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376
+5149, 128,2133,  92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392
+3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408
+4567,2252,  94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424
+4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440
+2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456
+5163,2337,2068,  23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472
+3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488
+ 598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504
+5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863,  41, # 1520
+5170,5171,4575,5172,1657,2338,  19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536
+1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552
+2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568
+3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584
+4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600
+5182,2692, 733,  40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616
+3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632
+4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648
+1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664
+1871,2762,3004,5187, 435,5188, 343,1108, 596,  17,1751,4579,2239,3477,3709,5189, # 1680
+4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696
+1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712
+ 240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728
+1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744
+1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760
+3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776
+ 619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792
+5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808
+2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824
+1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840
+1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551,  30,2268,4266, # 1856
+5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872
+ 829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888
+4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904
+ 375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920
+2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936
+ 444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952
+1041,3005, 293,1168,  87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968
+1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984
+ 730,1515, 184,2840,  66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000
+4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016
+4021,5231,5232,1186,  15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032
+1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048
+3596,1342,1681,1718, 766,3297, 286,  89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064
+5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080
+5240,3298, 310, 313,3482,2304, 770,4278,  54,3054, 189,4611,3105,3848,4025,5241, # 2096
+1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112
+2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128
+1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144
+3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160
+2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176
+3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192
+2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208
+4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224
+4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240
+3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256
+  97,  81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272
+3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288
+ 424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304
+3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320
+4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336
+3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352
+1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368
+5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384
+ 199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400
+5286, 587,  14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416
+1702,1226, 102,1547,  62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432
+ 391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448
+4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294,  86,1494,1730, # 2464
+4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480
+ 397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496
+2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512
+2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885,  28,2695, # 2528
+3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544
+1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560
+4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576
+2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592
+1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608
+1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624
+2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640
+3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656
+1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672
+5313,3493,5314,5315,5316,3310,2698,1433,3311, 131,  95,1504,4049, 723,4303,3166, # 2688
+1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704
+4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654,  53,5320,3014,5321, # 2720
+1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736
+ 135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752
+1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768
+4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784
+4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800
+2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816
+1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832
+4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848
+ 660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864
+5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880
+2322,3316,5346,5347,4308,5348,4309,  84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896
+3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912
+4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928
+ 790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944
+5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960
+5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976
+1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992
+4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008
+4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024
+2699,1516,3614,1121,1082,1329,3317,4073,1449,3873,  65,1128,2848,2927,2769,1590, # 3040
+3874,5370,5371,  12,2668,  45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056
+3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072
+2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088
+1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104
+4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120
+3736,1859,  91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136
+3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152
+2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168
+4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771,  61,4079,3738,1823,4080, # 3184
+5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200
+3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216
+2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232
+3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248
+1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264
+2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280
+3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296
+4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063,  56,1396,3113, # 3312
+2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328
+2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344
+5418,1076,  49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360
+1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376
+2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392
+1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408
+3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424
+4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629,  31,2851, # 3440
+2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456
+3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472
+3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488
+2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504
+4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520
+2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536
+3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552
+4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568
+5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584
+3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600
+ 194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616
+1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412,  42,3119, 464,5455,2642, # 3632
+4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648
+1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664
+4701,5462,3020, 962, 588,3629, 289,3250,2644,1116,  52,5463,3067,1797,5464,5465, # 3680
+5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696
+ 510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712
+5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728
+5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744
+2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760
+3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776
+2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792
+2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808
+ 681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824
+1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840
+4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856
+3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872
+3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888
+ 838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904
+2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920
+ 625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936
+2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952
+4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968
+1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984
+4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000
+1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016
+3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032
+ 574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048
+3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064
+5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080
+5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096
+3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112
+3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128
+1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144
+2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160
+5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176
+1561,2674,1452,4113,1375,5549,5550,  47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192
+1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208
+3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224
+ 919,2352,2975,2353,1270,4727,4115,  73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240
+1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256
+4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272
+5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288
+2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304
+3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320
+ 516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336
+1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 4352
+2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368
+2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384
+5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400
+5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416
+5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432
+2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448
+2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464
+1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480
+4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496
+3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512
+3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528
+4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544
+4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560
+2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576
+2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592
+5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608
+4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624
+5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640
+4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656
+ 502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672
+ 121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688
+1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704
+3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720
+4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736
+1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752
+5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768
+2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784
+2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800
+3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816
+5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832
+1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848
+3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864
+5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880
+1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896
+5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912
+2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928
+3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944
+2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960
+3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976
+3932,1988, 618, 427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992
+3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008
+4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024
+ 803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040
+2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056
+4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072
+3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088
+5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104
+1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120
+5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136
+ 425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152
+1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168
+ 479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184
+4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200
+1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216
+4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232
+1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248
+ 433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264
+3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280
+4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296
+5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312
+ 938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328
+3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344
+ 890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360
+2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376
+)
+# fmt: on
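The arithmetic quoted in the big5freq.py header comment can be checked directly. A minimal sketch, assuming the vendored package is importable as pip._vendor.chardet; the 0.74851 coverage figure and the 5401-character total come from that header comment, not from code in this diff:

    # Verify the distribution ratios quoted in the big5freq.py header.
    from pip._vendor.chardet.big5freq import BIG5_TYPICAL_DISTRIBUTION_RATIO

    ideal = 0.74851 / (1 - 0.74851)    # ideal distribution ratio, ~2.98
    random_ratio = 512 / (5401 - 512)  # random distribution ratio, ~0.105
    print(round(ideal, 2), round(random_ratio, 3))  # 2.98 0.105
    # 0.75 is roughly 25% of the ideal ratio, as the header states.
    print(BIG5_TYPICAL_DISTRIBUTION_RATIO / ideal)  # ~0.252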
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/big5prober.py b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/big5prober.py
new file mode 100644
index 0000000..e4dfa7a
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/big5prober.py
@@ -0,0 +1,47 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from .chardistribution import Big5DistributionAnalysis
+from .codingstatemachine import CodingStateMachine
+from .mbcharsetprober import MultiByteCharSetProber
+from .mbcssm import BIG5_SM_MODEL
+
+
+class Big5Prober(MultiByteCharSetProber):
+    def __init__(self):
+        super().__init__()
+        self.coding_sm = CodingStateMachine(BIG5_SM_MODEL)
+        self.distribution_analyzer = Big5DistributionAnalysis()
+        self.reset()
+
+    @property
+    def charset_name(self):
+        return "Big5"
+
+    @property
+    def language(self):
+        return "Chinese"
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/chardistribution.py b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/chardistribution.py
new file mode 100644
index 0000000..27b4a29
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/chardistribution.py
@@ -0,0 +1,259 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from .big5freq import (
+    BIG5_CHAR_TO_FREQ_ORDER,
+    BIG5_TABLE_SIZE,
+    BIG5_TYPICAL_DISTRIBUTION_RATIO,
+)
+from .euckrfreq import (
+    EUCKR_CHAR_TO_FREQ_ORDER,
+    EUCKR_TABLE_SIZE,
+    EUCKR_TYPICAL_DISTRIBUTION_RATIO,
+)
+from .euctwfreq import (
+    EUCTW_CHAR_TO_FREQ_ORDER,
+    EUCTW_TABLE_SIZE,
+    EUCTW_TYPICAL_DISTRIBUTION_RATIO,
+)
+from .gb2312freq import (
+    GB2312_CHAR_TO_FREQ_ORDER,
+    GB2312_TABLE_SIZE,
+    GB2312_TYPICAL_DISTRIBUTION_RATIO,
+)
+from .jisfreq import (
+    JIS_CHAR_TO_FREQ_ORDER,
+    JIS_TABLE_SIZE,
+    JIS_TYPICAL_DISTRIBUTION_RATIO,
+)
+from .johabfreq import JOHAB_TO_EUCKR_ORDER_TABLE
+
+
+class CharDistributionAnalysis:
+    ENOUGH_DATA_THRESHOLD = 1024
+    SURE_YES = 0.99
+    SURE_NO = 0.01
+    MINIMUM_DATA_THRESHOLD = 3
+
+    def __init__(self):
+        # Mapping table to get frequency order from char order (obtained
+        # from get_order())
+        self._char_to_freq_order = tuple()
+        self._table_size = None  # Size of above table
+        # This is a constant value which varies from language to language,
+        # used in calculating confidence.  See
+        # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html
+        # for further detail.
+        self.typical_distribution_ratio = None
+        self._done = None
+        self._total_chars = None
+        self._freq_chars = None
+        self.reset()
+
+    def reset(self):
+        """reset analyser, clear any state"""
+        # If this flag is set to True, detection is done and a conclusion has
+        # been made
+        self._done = False
+        self._total_chars = 0  # Total characters encountered
+        # The number of characters whose frequency order is less than 512
+        self._freq_chars = 0
+
+    def feed(self, char, char_len):
+        """feed a character with known length"""
+        if char_len == 2:
+            # we only care about 2-byte characters in our distribution analysis
+            order = self.get_order(char)
+        else:
+            order = -1
+        if order >= 0:
+            self._total_chars += 1
+            # order is valid
+            if order < self._table_size:
+                if self._char_to_freq_order[order] < 512:
+                    self._freq_chars += 1
+
+    def get_confidence(self):
+        """return confidence based on existing data"""
+        # if we didn't receive any characters in our consideration range,
+        # return a negative answer
+        if self._total_chars <= 0 or self._freq_chars <= self.MINIMUM_DATA_THRESHOLD:
+            return self.SURE_NO
+
+        if self._total_chars != self._freq_chars:
+            r = self._freq_chars / (
+                (self._total_chars - self._freq_chars) * self.typical_distribution_ratio
+            )
+            if r < self.SURE_YES:
+                return r
+
+        # normalize confidence (we don't want to be 100% sure)
+        return self.SURE_YES
+
+    def got_enough_data(self):
+        # It is not necessary to receive all of the data to draw a conclusion.
+        # For charset detection, a certain amount of data is enough.
+        return self._total_chars > self.ENOUGH_DATA_THRESHOLD
+
+    def get_order(self, _):
+        # We do not handle characters in their original encoded form; instead
+        # we convert each encoded character to a number, here called its order.
+        # This allows multiple encodings of a language to share one frequency
+        # table.
+        return -1
+
+
+class EUCTWDistributionAnalysis(CharDistributionAnalysis):
+    def __init__(self):
+        super().__init__()
+        self._char_to_freq_order = EUCTW_CHAR_TO_FREQ_ORDER
+        self._table_size = EUCTW_TABLE_SIZE
+        self.typical_distribution_ratio = EUCTW_TYPICAL_DISTRIBUTION_RATIO
+
+    def get_order(self, byte_str):
+        # for euc-TW encoding, we are interested
+        #   first  byte range: 0xc4 -- 0xfe
+        #   second byte range: 0xa1 -- 0xfe
+        # no validation needed here. State machine has done that
+        first_char = byte_str[0]
+        if first_char >= 0xC4:
+            return 94 * (first_char - 0xC4) + byte_str[1] - 0xA1
+        return -1
+
+
+class EUCKRDistributionAnalysis(CharDistributionAnalysis):
+    def __init__(self):
+        super().__init__()
+        self._char_to_freq_order = EUCKR_CHAR_TO_FREQ_ORDER
+        self._table_size = EUCKR_TABLE_SIZE
+        self.typical_distribution_ratio = EUCKR_TYPICAL_DISTRIBUTION_RATIO
+
+    def get_order(self, byte_str):
+        # for euc-KR encoding, we are interested
+        #   first  byte range: 0xb0 -- 0xfe
+        #   second byte range: 0xa1 -- 0xfe
+        # no validation needed here. State machine has done that
+        first_char = byte_str[0]
+        if first_char >= 0xB0:
+            return 94 * (first_char - 0xB0) + byte_str[1] - 0xA1
+        return -1
+
+
+class JOHABDistributionAnalysis(CharDistributionAnalysis):
+    def __init__(self):
+        super().__init__()
+        self._char_to_freq_order = EUCKR_CHAR_TO_FREQ_ORDER
+        self._table_size = EUCKR_TABLE_SIZE
+        self.typical_distribution_ratio = EUCKR_TYPICAL_DISTRIBUTION_RATIO
+
+    def get_order(self, byte_str):
+        first_char = byte_str[0]
+        if 0x88 <= first_char < 0xD4:
+            code = first_char * 256 + byte_str[1]
+            return JOHAB_TO_EUCKR_ORDER_TABLE.get(code, -1)
+        return -1
+
+
+class GB2312DistributionAnalysis(CharDistributionAnalysis):
+    def __init__(self):
+        super().__init__()
+        self._char_to_freq_order = GB2312_CHAR_TO_FREQ_ORDER
+        self._table_size = GB2312_TABLE_SIZE
+        self.typical_distribution_ratio = GB2312_TYPICAL_DISTRIBUTION_RATIO
+
+    def get_order(self, byte_str):
+        # for GB2312 encoding, we are interested
+        #  first  byte range: 0xb0 -- 0xfe
+        #  second byte range: 0xa1 -- 0xfe
+        # no validation needed here. State machine has done that
+        first_char, second_char = byte_str[0], byte_str[1]
+        if (first_char >= 0xB0) and (second_char >= 0xA1):
+            return 94 * (first_char - 0xB0) + second_char - 0xA1
+        return -1
+
+
+class Big5DistributionAnalysis(CharDistributionAnalysis):
+    def __init__(self):
+        super().__init__()
+        self._char_to_freq_order = BIG5_CHAR_TO_FREQ_ORDER
+        self._table_size = BIG5_TABLE_SIZE
+        self.typical_distribution_ratio = BIG5_TYPICAL_DISTRIBUTION_RATIO
+
+    def get_order(self, byte_str):
+        # for big5 encoding, we are interested
+        #   first  byte range: 0xa4 -- 0xfe
+        #   second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe
+        # no validation needed here. State machine has done that
+        first_char, second_char = byte_str[0], byte_str[1]
+        if first_char >= 0xA4:
+            if second_char >= 0xA1:
+                return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63
+            return 157 * (first_char - 0xA4) + second_char - 0x40
+        return -1
+
+
+class SJISDistributionAnalysis(CharDistributionAnalysis):
+    def __init__(self):
+        super().__init__()
+        self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER
+        self._table_size = JIS_TABLE_SIZE
+        self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO
+
+    def get_order(self, byte_str):
+        # for sjis encoding, we are interested
+        #   first  byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe
+    #   second byte range: 0x40 -- 0x7e,  0x81 -- 0xfe
+        # no validation needed here. State machine has done that
+        first_char, second_char = byte_str[0], byte_str[1]
+        if 0x81 <= first_char <= 0x9F:
+            order = 188 * (first_char - 0x81)
+        elif 0xE0 <= first_char <= 0xEF:
+            order = 188 * (first_char - 0xE0 + 31)
+        else:
+            return -1
+        order = order + second_char - 0x40
+        if second_char > 0x7F:
+            order = -1
+        return order
+
+
+class EUCJPDistributionAnalysis(CharDistributionAnalysis):
+    def __init__(self):
+        super().__init__()
+        self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER
+        self._table_size = JIS_TABLE_SIZE
+        self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO
+
+    def get_order(self, byte_str):
+        # for euc-JP encoding, we are interested
+        #   first  byte range: 0xa0 -- 0xfe
+        #   second byte range: 0xa1 -- 0xfe
+        # no validation needed here. State machine has done that
+        char = byte_str[0]
+        if char >= 0xA0:
+            return 94 * (char - 0xA1) + byte_str[1] - 0xA1
+        return -1
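The Big5 row arithmetic in Big5DistributionAnalysis.get_order above can be traced with a single byte pair. A sketch under the same vendored import path:

    from pip._vendor.chardet.chardistribution import Big5DistributionAnalysis

    analysis = Big5DistributionAnalysis()
    # First byte 0xa4 selects row 0; second byte 0xa1 is the first cell of
    # the upper sub-range, which starts at offset 63 (after the 63 cells of
    # 0x40-0x7e): 157 * (0xa4 - 0xa4) + (0xa1 - 0xa1) + 63 == 63
    print(analysis.get_order(b"\xa4\xa1"))  # 63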
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/charsetgroupprober.py b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/charsetgroupprober.py
new file mode 100644
index 0000000..778ff33
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/charsetgroupprober.py
@@ -0,0 +1,109 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from .charsetprober import CharSetProber
+from .enums import ProbingState
+
+
+class CharSetGroupProber(CharSetProber):
+    def __init__(self, lang_filter=None):
+        super().__init__(lang_filter=lang_filter)
+        self._active_num = 0
+        self.probers = []
+        self._best_guess_prober = None
+
+    def reset(self):
+        super().reset()
+        self._active_num = 0
+        for prober in self.probers:
+            if prober:
+                prober.reset()
+                prober.active = True
+                self._active_num += 1
+        self._best_guess_prober = None
+
+    @property
+    def charset_name(self):
+        if not self._best_guess_prober:
+            self.get_confidence()
+            if not self._best_guess_prober:
+                return None
+        return self._best_guess_prober.charset_name
+
+    @property
+    def language(self):
+        if not self._best_guess_prober:
+            self.get_confidence()
+            if not self._best_guess_prober:
+                return None
+        return self._best_guess_prober.language
+
+    def feed(self, byte_str):
+        for prober in self.probers:
+            if not prober:
+                continue
+            if not prober.active:
+                continue
+            state = prober.feed(byte_str)
+            if not state:
+                continue
+            if state == ProbingState.FOUND_IT:
+                self._best_guess_prober = prober
+                self._state = ProbingState.FOUND_IT
+                return self.state
+            if state == ProbingState.NOT_ME:
+                prober.active = False
+                self._active_num -= 1
+                if self._active_num <= 0:
+                    self._state = ProbingState.NOT_ME
+                    return self.state
+        return self.state
+
+    def get_confidence(self):
+        state = self.state
+        if state == ProbingState.FOUND_IT:
+            return 0.99
+        if state == ProbingState.NOT_ME:
+            return 0.01
+        best_conf = 0.0
+        self._best_guess_prober = None
+        for prober in self.probers:
+            if not prober:
+                continue
+            if not prober.active:
+                self.logger.debug("%s not active", prober.charset_name)
+                continue
+            conf = prober.get_confidence()
+            self.logger.debug(
+                "%s %s confidence = %s", prober.charset_name, prober.language, conf
+            )
+            if best_conf < conf:
+                best_conf = conf
+                self._best_guess_prober = prober
+        if not self._best_guess_prober:
+            return 0.0
+        return best_conf
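The group prober's feed/get_confidence loop is easiest to see through one of its concrete subclasses. A sketch using SBCSGroupProber from this same vendored package (the sample text is illustrative):

    from pip._vendor.chardet.sbcsgroupprober import SBCSGroupProber

    group = SBCSGroupProber()
    group.feed(b"Bonjour tout le monde, voil\xe0 un texte accentu\xe9.")
    # charset_name consults get_confidence() and reports the best child
    # prober's guess, per the properties above.
    print(group.charset_name, group.get_confidence())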
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/charsetprober.py b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/charsetprober.py
new file mode 100644
index 0000000..9f1afd9
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/charsetprober.py
@@ -0,0 +1,138 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#   Shy Shalom - original C code
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+import logging
+import re
+
+from .enums import ProbingState
+
+INTERNATIONAL_WORDS_PATTERN = re.compile(
+    b"[a-zA-Z]*[\x80-\xFF]+[a-zA-Z]*[^a-zA-Z\x80-\xFF]?"
+)
+
+
+class CharSetProber:
+
+    SHORTCUT_THRESHOLD = 0.95
+
+    def __init__(self, lang_filter=None):
+        self._state = None
+        self.lang_filter = lang_filter
+        self.logger = logging.getLogger(__name__)
+
+    def reset(self):
+        self._state = ProbingState.DETECTING
+
+    @property
+    def charset_name(self):
+        return None
+
+    def feed(self, byte_str):
+        raise NotImplementedError
+
+    @property
+    def state(self):
+        return self._state
+
+    def get_confidence(self):
+        return 0.0
+
+    @staticmethod
+    def filter_high_byte_only(buf):
+        buf = re.sub(b"([\x00-\x7F])+", b" ", buf)
+        return buf
+
+    @staticmethod
+    def filter_international_words(buf):
+        """
+        We define three types of bytes:
+        alphabet: English letters [a-zA-Z]
+        international: international characters [\x80-\xFF]
+        marker: everything else [^a-zA-Z\x80-\xFF]
+        The input buffer can be thought of as a series of words delimited
+        by markers. This function keeps only the words that contain at
+        least one international character. All contiguous sequences of
+        markers are replaced by a single ASCII space character.
+        This filter applies to all scripts that do not use English characters.
+        """
+        filtered = bytearray()
+
+        # This regex matches only words that contain at least one
+        # international character. A word may include one marker
+        # character at the end.
+        words = INTERNATIONAL_WORDS_PATTERN.findall(buf)
+
+        for word in words:
+            filtered.extend(word[:-1])
+
+            # If the last character in the word is a marker, replace it with a
+            # space as markers shouldn't affect our analysis (they are used
+            # similarly across all languages and may thus have similar
+            # frequencies).
+            last_char = word[-1:]
+            if not last_char.isalpha() and last_char < b"\x80":
+                last_char = b" "
+            filtered.extend(last_char)
+
+        return filtered
+
+    @staticmethod
+    def remove_xml_tags(buf):
+        """
+        Returns a copy of ``buf`` that retains only the sequences of English
+        alphabet and high byte characters that are not between <> characters.
+        This filter can be applied to all scripts which contain both English
+        characters and extended ASCII characters, but is currently only used by
+        ``Latin1Prober``.
+        """
+        filtered = bytearray()
+        in_tag = False
+        prev = 0
+        buf = memoryview(buf).cast("c")
+
+        for curr, buf_char in enumerate(buf):
+            # Check if we're coming out of or entering an XML tag
+            if buf_char == b">":
+                prev = curr + 1
+                in_tag = False
+            elif buf_char == b"<":
+                if curr > prev and not in_tag:
+                    # Keep everything after last non-extended-ASCII,
+                    # non-alphabetic character
+                    filtered.extend(buf[prev:curr])
+                    # Output a space to delimit stretch we kept
+                    filtered.extend(b" ")
+                in_tag = True
+
+        # If we're not in a tag...
+        if not in_tag:
+            # Keep everything after last non-extended-ASCII, non-alphabetic
+            # character
+            filtered.extend(buf[prev:])
+
+        return filtered
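filter_international_words is a static method, so it can be exercised without instantiating a prober. A sketch with an illustrative buffer:

    from pip._vendor.chardet.charsetprober import CharSetProber

    buf = b"plain words caf\xe9, vanish"
    # Only the word containing a byte >= 0x80 survives, and its trailing
    # marker byte (the comma) is replaced by a space.
    print(bytes(CharSetProber.filter_international_words(buf)))  # b'caf\xe9 '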
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/cli/__init__.py b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/cli/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/cli/__init__.py
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/cli/__pycache__/__init__.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/cli/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..cb88808
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/cli/__pycache__/__init__.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-39.pyc b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-39.pyc
new file mode 100644
index 0000000..e21b5e1
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-39.pyc
Binary files differ
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/cli/chardetect.py b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/cli/chardetect.py
new file mode 100644
index 0000000..7926fa3
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/cli/chardetect.py
@@ -0,0 +1,86 @@
+"""
+Script which takes one or more file paths and reports on their detected
+encodings
+
+Example::
+
+    % chardetect somefile someotherfile
+    somefile: windows-1252 with confidence 0.5
+    someotherfile: ascii with confidence 1.0
+
+If no paths are provided, it takes its input from stdin.
+
+"""
+
+
+import argparse
+import sys
+
+from .. import __version__
+from ..universaldetector import UniversalDetector
+
+
+def description_of(lines, name="stdin"):
+    """
+    Return a string describing the probable encoding of a file or
+    list of strings.
+
+    :param lines: The lines to get the encoding of.
+    :type lines: Iterable of bytes
+    :param name: Name of file or collection of lines
+    :type name: str
+    """
+    u = UniversalDetector()
+    for line in lines:
+        line = bytearray(line)
+        u.feed(line)
+        # shortcut out of the loop to save reading further - particularly useful if we read a BOM.
+        if u.done:
+            break
+    u.close()
+    result = u.result
+    if result["encoding"]:
+        return f'{name}: {result["encoding"]} with confidence {result["confidence"]}'
+    return f"{name}: no result"
+
+
+def main(argv=None):
+    """
+    Handles command line arguments and gets things started.
+
+    :param argv: List of arguments, as if specified on the command-line.
+                 If None, ``sys.argv[1:]`` is used instead.
+    :type argv: list of str
+    """
+    # Get command line arguments
+    parser = argparse.ArgumentParser(
+        description="Takes one or more file paths and reports their detected \
+                     encodings"
+    )
+    parser.add_argument(
+        "input",
+        help="File whose encoding we would like to determine. \
+                              (default: stdin)",
+        type=argparse.FileType("rb"),
+        nargs="*",
+        default=[sys.stdin.buffer],
+    )
+    parser.add_argument(
+        "--version", action="version", version=f"%(prog)s {__version__}"
+    )
+    args = parser.parse_args(argv)
+
+    for f in args.input:
+        if f.isatty():
+            print(
+                "You are running chardetect interactively. Press "
+                "CTRL-D twice at the start of a blank line to signal the "
+                "end of your input. If you want help, run chardetect "
+                "--help\n",
+                file=sys.stderr,
+            )
+        print(description_of(f, f.name))
+
+
+if __name__ == "__main__":
+    main()
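description_of also works on in-memory byte lines, bypassing the CLI. A sketch, again under the vendored import path:

    from pip._vendor.chardet.cli.chardetect import description_of

    lines = [b"hello world\n", b"plain ascii text\n"]
    # Pure-ASCII input is typically reported as ascii with confidence 1.0.
    print(description_of(lines, name="example"))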
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/codingstatemachine.py b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/codingstatemachine.py
new file mode 100644
index 0000000..d3e3e82
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/codingstatemachine.py
@@ -0,0 +1,88 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+import logging
+
+from .enums import MachineState
+
+
+class CodingStateMachine:
+    """
+    A state machine to verify a byte sequence for a particular encoding. For
+    each byte the detector receives, it will feed that byte to every active
+    state machine available, one byte at a time. The state machine changes its
+    state based on its previous state and the byte it receives. There are 3
+    states in a state machine that are of interest to an auto-detector:
+
+    START state: This is the state to start with, or a legal byte sequence
+                 (i.e. a valid code point) for a character has been identified.
+
+    ME state:  This indicates that the state machine identified a byte sequence
+               that is specific to the charset it is designed for and that
+               there is no other possible encoding which can contain this byte
+               sequence. This will lead to an immediate positive answer for
+               the detector.
+
+    ERROR state: This indicates the state machine identified an illegal byte
+                 sequence for that encoding. This will lead to an immediate
+                 negative answer for this encoding. Detector will exclude this
+                 encoding from consideration from here on.
+    """
+
+    def __init__(self, sm):
+        self._model = sm
+        self._curr_byte_pos = 0
+        self._curr_char_len = 0
+        self._curr_state = None
+        self.logger = logging.getLogger(__name__)
+        self.reset()
+
+    def reset(self):
+        self._curr_state = MachineState.START
+
+    def next_state(self, c):
+        # for each byte we get its class;
+        # if it is the first byte, we also get the byte length
+        byte_class = self._model["class_table"][c]
+        if self._curr_state == MachineState.START:
+            self._curr_byte_pos = 0
+            self._curr_char_len = self._model["char_len_table"][byte_class]
+        # from byte's class and state_table, we get its next state
+        curr_state = self._curr_state * self._model["class_factor"] + byte_class
+        self._curr_state = self._model["state_table"][curr_state]
+        self._curr_byte_pos += 1
+        return self._curr_state
+
+    def get_current_charlen(self):
+        return self._curr_char_len
+
+    def get_coding_state_machine(self):
+        return self._model["name"]
+
+    @property
+    def language(self):
+        return self._model["language"]
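
A minimal sketch of driving one of these machines by hand, using the
HZ-GB-2312 model that this diff adds in escsm.py further below; the byte
string is a classic HZ fragment, and the machine reaches ITS_ME on the
closing "~}" shift sequence:

    from pip._vendor.chardet.codingstatemachine import CodingStateMachine
    from pip._vendor.chardet.enums import MachineState
    from pip._vendor.chardet.escsm import HZ_SM_MODEL

    sm = CodingStateMachine(HZ_SM_MODEL)
    for byte in b"~{<:Ky2;S{#,~}":  # "~{" enters GB mode, "~}" leaves it
        state = sm.next_state(byte)
        if state == MachineState.ITS_ME:
            print("definitely", sm.get_coding_state_machine())  # HZ-GB-2312
            break
        if state == MachineState.ERROR:
            print("not", sm.get_coding_state_machine())
            break
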
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/cp949prober.py b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/cp949prober.py
new file mode 100644
index 0000000..28a1f3d
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/cp949prober.py
@@ -0,0 +1,49 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from .chardistribution import EUCKRDistributionAnalysis
+from .codingstatemachine import CodingStateMachine
+from .mbcharsetprober import MultiByteCharSetProber
+from .mbcssm import CP949_SM_MODEL
+
+
+class CP949Prober(MultiByteCharSetProber):
+    def __init__(self):
+        super().__init__()
+        self.coding_sm = CodingStateMachine(CP949_SM_MODEL)
+        # NOTE: CP949 is a superset of EUC-KR, so the distribution should be
+        #       no different.
+        self.distribution_analyzer = EUCKRDistributionAnalysis()
+        self.reset()
+
+    @property
+    def charset_name(self):
+        return "CP949"
+
+    @property
+    def language(self):
+        return "Korean"
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/enums.py b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/enums.py
new file mode 100644
index 0000000..32a77e7
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/enums.py
@@ -0,0 +1,82 @@
+"""
+All of the Enums that are used throughout the chardet package.
+
+:author: Dan Blanchard (dan.blanchard@gmail.com)
+"""
+
+
+class InputState:
+    """
+    This enum represents the different states a universal detector can be in.
+    """
+
+    PURE_ASCII = 0
+    ESC_ASCII = 1
+    HIGH_BYTE = 2
+
+
+class LanguageFilter:
+    """
+    This enum represents the different language filters we can apply to a
+    ``UniversalDetector``.
+    """
+
+    CHINESE_SIMPLIFIED = 0x01
+    CHINESE_TRADITIONAL = 0x02
+    JAPANESE = 0x04
+    KOREAN = 0x08
+    NON_CJK = 0x10
+    ALL = 0x1F
+    CHINESE = CHINESE_SIMPLIFIED | CHINESE_TRADITIONAL
+    CJK = CHINESE | JAPANESE | KOREAN
+
+
+class ProbingState:
+    """
+    This enum represents the different states a prober can be in.
+    """
+
+    DETECTING = 0
+    FOUND_IT = 1
+    NOT_ME = 2
+
+
+class MachineState:
+    """
+    This enum represents the different states a state machine can be in.
+    """
+
+    START = 0
+    ERROR = 1
+    ITS_ME = 2
+
+
+class SequenceLikelihood:
+    """
+    This enum represents the likelihood of a character following the previous one.
+    """
+
+    NEGATIVE = 0
+    UNLIKELY = 1
+    LIKELY = 2
+    POSITIVE = 3
+
+    @classmethod
+    def get_num_categories(cls):
+        """:returns: The number of likelihood categories in the enum."""
+        return 4
+
+
+class CharacterCategory:
+    """
+    This enum represents the different categories language models for
+    ``SingleByteCharsetProber`` put characters into.
+
+    Anything less than CONTROL is considered a letter.
+    """
+
+    UNDEFINED = 255
+    LINE_BREAK = 254
+    SYMBOL = 253
+    DIGIT = 252
+    CONTROL = 251
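
These "enums" are plain integer bit flags rather than enum.Enum members, so
language filters combine with bitwise OR and are tested with bitwise AND,
exactly as EscCharSetProber does below. A short sketch:

    from pip._vendor.chardet.enums import LanguageFilter

    # CJK is the union of the four East Asian filters
    assert LanguageFilter.CJK == (
        LanguageFilter.CHINESE_SIMPLIFIED
        | LanguageFilter.CHINESE_TRADITIONAL
        | LanguageFilter.JAPANESE
        | LanguageFilter.KOREAN
    )
    # A non-zero AND means the filter includes the language
    print(bool(LanguageFilter.ALL & LanguageFilter.JAPANESE))  # True
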
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/escprober.py b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/escprober.py
new file mode 100644
index 0000000..d992611
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/escprober.py
@@ -0,0 +1,102 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from .charsetprober import CharSetProber
+from .codingstatemachine import CodingStateMachine
+from .enums import LanguageFilter, MachineState, ProbingState
+from .escsm import (
+    HZ_SM_MODEL,
+    ISO2022CN_SM_MODEL,
+    ISO2022JP_SM_MODEL,
+    ISO2022KR_SM_MODEL,
+)
+
+
+class EscCharSetProber(CharSetProber):
+    """
+    This CharSetProber uses a "code scheme" approach for detecting encodings,
+    whereby easily recognizable escape or shift sequences are relied on to
+    identify these encodings.
+    """
+
+    def __init__(self, lang_filter=None):
+        super().__init__(lang_filter=lang_filter)
+        self.coding_sm = []
+        if self.lang_filter & LanguageFilter.CHINESE_SIMPLIFIED:
+            self.coding_sm.append(CodingStateMachine(HZ_SM_MODEL))
+            self.coding_sm.append(CodingStateMachine(ISO2022CN_SM_MODEL))
+        if self.lang_filter & LanguageFilter.JAPANESE:
+            self.coding_sm.append(CodingStateMachine(ISO2022JP_SM_MODEL))
+        if self.lang_filter & LanguageFilter.KOREAN:
+            self.coding_sm.append(CodingStateMachine(ISO2022KR_SM_MODEL))
+        self.active_sm_count = None
+        self._detected_charset = None
+        self._detected_language = None
+        self._state = None
+        self.reset()
+
+    def reset(self):
+        super().reset()
+        for coding_sm in self.coding_sm:
+            if not coding_sm:
+                continue
+            coding_sm.active = True
+            coding_sm.reset()
+        self.active_sm_count = len(self.coding_sm)
+        self._detected_charset = None
+        self._detected_language = None
+
+    @property
+    def charset_name(self):
+        return self._detected_charset
+
+    @property
+    def language(self):
+        return self._detected_language
+
+    def get_confidence(self):
+        return 0.99 if self._detected_charset else 0.00
+
+    def feed(self, byte_str):
+        for c in byte_str:
+            for coding_sm in self.coding_sm:
+                if not coding_sm or not coding_sm.active:
+                    continue
+                coding_state = coding_sm.next_state(c)
+                if coding_state == MachineState.ERROR:
+                    coding_sm.active = False
+                    self.active_sm_count -= 1
+                    if self.active_sm_count <= 0:
+                        self._state = ProbingState.NOT_ME
+                        return self.state
+                elif coding_state == MachineState.ITS_ME:
+                    self._state = ProbingState.FOUND_IT
+                    self._detected_charset = coding_sm.get_coding_state_machine()
+                    self._detected_language = coding_sm.language
+                    return self.state
+
+        return self.state
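
A sketch of the code-scheme approach in action: with the full language filter,
the three bytes of the ISO-2022-JP escape sequence ESC $ B are already enough
for an immediate positive answer (import paths as in this diff):

    from pip._vendor.chardet.enums import LanguageFilter, ProbingState
    from pip._vendor.chardet.escprober import EscCharSetProber

    prober = EscCharSetProber(lang_filter=LanguageFilter.ALL)
    state = prober.feed(b"\x1b$B")  # ESC $ B selects JIS X 0208, unique to ISO-2022-JP
    if state == ProbingState.FOUND_IT:
        print(prober.charset_name, prober.language)  # ISO-2022-JP Japanese
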
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/escsm.py b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/escsm.py
new file mode 100644
index 0000000..3aa0f4d
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/escsm.py
@@ -0,0 +1,260 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from .enums import MachineState
+
+# fmt: off
+HZ_CLS = (
+    1, 0, 0, 0, 0, 0, 0, 0,  # 00 - 07
+    0, 0, 0, 0, 0, 0, 0, 0,  # 08 - 0f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 10 - 17
+    0, 0, 0, 1, 0, 0, 0, 0,  # 18 - 1f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 20 - 27
+    0, 0, 0, 0, 0, 0, 0, 0,  # 28 - 2f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 30 - 37
+    0, 0, 0, 0, 0, 0, 0, 0,  # 38 - 3f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 40 - 47
+    0, 0, 0, 0, 0, 0, 0, 0,  # 48 - 4f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 50 - 57
+    0, 0, 0, 0, 0, 0, 0, 0,  # 58 - 5f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 60 - 67
+    0, 0, 0, 0, 0, 0, 0, 0,  # 68 - 6f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 70 - 77
+    0, 0, 0, 4, 0, 5, 2, 0,  # 78 - 7f
+    1, 1, 1, 1, 1, 1, 1, 1,  # 80 - 87
+    1, 1, 1, 1, 1, 1, 1, 1,  # 88 - 8f
+    1, 1, 1, 1, 1, 1, 1, 1,  # 90 - 97
+    1, 1, 1, 1, 1, 1, 1, 1,  # 98 - 9f
+    1, 1, 1, 1, 1, 1, 1, 1,  # a0 - a7
+    1, 1, 1, 1, 1, 1, 1, 1,  # a8 - af
+    1, 1, 1, 1, 1, 1, 1, 1,  # b0 - b7
+    1, 1, 1, 1, 1, 1, 1, 1,  # b8 - bf
+    1, 1, 1, 1, 1, 1, 1, 1,  # c0 - c7
+    1, 1, 1, 1, 1, 1, 1, 1,  # c8 - cf
+    1, 1, 1, 1, 1, 1, 1, 1,  # d0 - d7
+    1, 1, 1, 1, 1, 1, 1, 1,  # d8 - df
+    1, 1, 1, 1, 1, 1, 1, 1,  # e0 - e7
+    1, 1, 1, 1, 1, 1, 1, 1,  # e8 - ef
+    1, 1, 1, 1, 1, 1, 1, 1,  # f0 - f7
+    1, 1, 1, 1, 1, 1, 1, 1,  # f8 - ff
+)
+
+HZ_ST = (
+    MachineState.START, MachineState.ERROR,      3, MachineState.START, MachineState.START, MachineState.START, MachineState.ERROR, MachineState.ERROR, # 00-07
+    MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, # 08-0f
+    MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ERROR, MachineState.ERROR, MachineState.START, MachineState.START,      4, MachineState.ERROR, # 10-17
+         5, MachineState.ERROR,      6, MachineState.ERROR,      5,      5,      4, MachineState.ERROR, # 18-1f
+         4, MachineState.ERROR,      4,      4,      4, MachineState.ERROR,      4, MachineState.ERROR, # 20-27
+         4, MachineState.ITS_ME, MachineState.START, MachineState.START, MachineState.START, MachineState.START, MachineState.START, MachineState.START, # 28-2f
+)
+# fmt: on
+
+HZ_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0)
+
+HZ_SM_MODEL = {
+    "class_table": HZ_CLS,
+    "class_factor": 6,
+    "state_table": HZ_ST,
+    "char_len_table": HZ_CHAR_LEN_TABLE,
+    "name": "HZ-GB-2312",
+    "language": "Chinese",
+}
+
+# fmt: off
+ISO2022CN_CLS = (
+    2, 0, 0, 0, 0, 0, 0, 0,  # 00 - 07
+    0, 0, 0, 0, 0, 0, 0, 0,  # 08 - 0f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 10 - 17
+    0, 0, 0, 1, 0, 0, 0, 0,  # 18 - 1f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 20 - 27
+    0, 3, 0, 0, 0, 0, 0, 0,  # 28 - 2f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 30 - 37
+    0, 0, 0, 0, 0, 0, 0, 0,  # 38 - 3f
+    0, 0, 0, 4, 0, 0, 0, 0,  # 40 - 47
+    0, 0, 0, 0, 0, 0, 0, 0,  # 48 - 4f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 50 - 57
+    0, 0, 0, 0, 0, 0, 0, 0,  # 58 - 5f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 60 - 67
+    0, 0, 0, 0, 0, 0, 0, 0,  # 68 - 6f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 70 - 77
+    0, 0, 0, 0, 0, 0, 0, 0,  # 78 - 7f
+    2, 2, 2, 2, 2, 2, 2, 2,  # 80 - 87
+    2, 2, 2, 2, 2, 2, 2, 2,  # 88 - 8f
+    2, 2, 2, 2, 2, 2, 2, 2,  # 90 - 97
+    2, 2, 2, 2, 2, 2, 2, 2,  # 98 - 9f
+    2, 2, 2, 2, 2, 2, 2, 2,  # a0 - a7
+    2, 2, 2, 2, 2, 2, 2, 2,  # a8 - af
+    2, 2, 2, 2, 2, 2, 2, 2,  # b0 - b7
+    2, 2, 2, 2, 2, 2, 2, 2,  # b8 - bf
+    2, 2, 2, 2, 2, 2, 2, 2,  # c0 - c7
+    2, 2, 2, 2, 2, 2, 2, 2,  # c8 - cf
+    2, 2, 2, 2, 2, 2, 2, 2,  # d0 - d7
+    2, 2, 2, 2, 2, 2, 2, 2,  # d8 - df
+    2, 2, 2, 2, 2, 2, 2, 2,  # e0 - e7
+    2, 2, 2, 2, 2, 2, 2, 2,  # e8 - ef
+    2, 2, 2, 2, 2, 2, 2, 2,  # f0 - f7
+    2, 2, 2, 2, 2, 2, 2, 2,  # f8 - ff
+)
+
+ISO2022CN_ST = (
+    MachineState.START,      3, MachineState.ERROR, MachineState.START, MachineState.START, MachineState.START, MachineState.START, MachineState.START, # 00-07
+    MachineState.START, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, # 08-0f
+    MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, # 10-17
+    MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR,      4, MachineState.ERROR, # 18-1f
+    MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, # 20-27
+        5,      6, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, # 28-2f
+    MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, # 30-37
+    MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ERROR, MachineState.START, # 38-3f
+)
+# fmt: on
+
+ISO2022CN_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0)
+
+ISO2022CN_SM_MODEL = {
+    "class_table": ISO2022CN_CLS,
+    "class_factor": 9,
+    "state_table": ISO2022CN_ST,
+    "char_len_table": ISO2022CN_CHAR_LEN_TABLE,
+    "name": "ISO-2022-CN",
+    "language": "Chinese",
+}
+
+# fmt: off
+ISO2022JP_CLS = (
+    2, 0, 0, 0, 0, 0, 0, 0,  # 00 - 07
+    0, 0, 0, 0, 0, 0, 2, 2,  # 08 - 0f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 10 - 17
+    0, 0, 0, 1, 0, 0, 0, 0,  # 18 - 1f
+    0, 0, 0, 0, 7, 0, 0, 0,  # 20 - 27
+    3, 0, 0, 0, 0, 0, 0, 0,  # 28 - 2f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 30 - 37
+    0, 0, 0, 0, 0, 0, 0, 0,  # 38 - 3f
+    6, 0, 4, 0, 8, 0, 0, 0,  # 40 - 47
+    0, 9, 5, 0, 0, 0, 0, 0,  # 48 - 4f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 50 - 57
+    0, 0, 0, 0, 0, 0, 0, 0,  # 58 - 5f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 60 - 67
+    0, 0, 0, 0, 0, 0, 0, 0,  # 68 - 6f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 70 - 77
+    0, 0, 0, 0, 0, 0, 0, 0,  # 78 - 7f
+    2, 2, 2, 2, 2, 2, 2, 2,  # 80 - 87
+    2, 2, 2, 2, 2, 2, 2, 2,  # 88 - 8f
+    2, 2, 2, 2, 2, 2, 2, 2,  # 90 - 97
+    2, 2, 2, 2, 2, 2, 2, 2,  # 98 - 9f
+    2, 2, 2, 2, 2, 2, 2, 2,  # a0 - a7
+    2, 2, 2, 2, 2, 2, 2, 2,  # a8 - af
+    2, 2, 2, 2, 2, 2, 2, 2,  # b0 - b7
+    2, 2, 2, 2, 2, 2, 2, 2,  # b8 - bf
+    2, 2, 2, 2, 2, 2, 2, 2,  # c0 - c7
+    2, 2, 2, 2, 2, 2, 2, 2,  # c8 - cf
+    2, 2, 2, 2, 2, 2, 2, 2,  # d0 - d7
+    2, 2, 2, 2, 2, 2, 2, 2,  # d8 - df
+    2, 2, 2, 2, 2, 2, 2, 2,  # e0 - e7
+    2, 2, 2, 2, 2, 2, 2, 2,  # e8 - ef
+    2, 2, 2, 2, 2, 2, 2, 2,  # f0 - f7
+    2, 2, 2, 2, 2, 2, 2, 2,  # f8 - ff
+)
+
+ISO2022JP_ST = (
+    MachineState.START,      3, MachineState.ERROR, MachineState.START, MachineState.START, MachineState.START, MachineState.START, MachineState.START, # 00-07
+    MachineState.START, MachineState.START, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, # 08-0f
+    MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, # 10-17
+    MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ERROR, MachineState.ERROR, # 18-1f
+    MachineState.ERROR,      5, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR,      4, MachineState.ERROR, MachineState.ERROR, # 20-27
+    MachineState.ERROR, MachineState.ERROR, MachineState.ERROR,      6, MachineState.ITS_ME, MachineState.ERROR, MachineState.ITS_ME, MachineState.ERROR, # 28-2f
+    MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ITS_ME, # 30-37
+    MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, # 38-3f
+    MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ERROR, MachineState.START, MachineState.START, # 40-47
+)
+# fmt: on
+
+ISO2022JP_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
+
+ISO2022JP_SM_MODEL = {
+    "class_table": ISO2022JP_CLS,
+    "class_factor": 10,
+    "state_table": ISO2022JP_ST,
+    "char_len_table": ISO2022JP_CHAR_LEN_TABLE,
+    "name": "ISO-2022-JP",
+    "language": "Japanese",
+}
+
+# fmt: off
+ISO2022KR_CLS = (
+    2, 0, 0, 0, 0, 0, 0, 0,  # 00 - 07
+    0, 0, 0, 0, 0, 0, 0, 0,  # 08 - 0f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 10 - 17
+    0, 0, 0, 1, 0, 0, 0, 0,  # 18 - 1f
+    0, 0, 0, 0, 3, 0, 0, 0,  # 20 - 27
+    0, 4, 0, 0, 0, 0, 0, 0,  # 28 - 2f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 30 - 37
+    0, 0, 0, 0, 0, 0, 0, 0,  # 38 - 3f
+    0, 0, 0, 5, 0, 0, 0, 0,  # 40 - 47
+    0, 0, 0, 0, 0, 0, 0, 0,  # 48 - 4f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 50 - 57
+    0, 0, 0, 0, 0, 0, 0, 0,  # 58 - 5f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 60 - 67
+    0, 0, 0, 0, 0, 0, 0, 0,  # 68 - 6f
+    0, 0, 0, 0, 0, 0, 0, 0,  # 70 - 77
+    0, 0, 0, 0, 0, 0, 0, 0,  # 78 - 7f
+    2, 2, 2, 2, 2, 2, 2, 2,  # 80 - 87
+    2, 2, 2, 2, 2, 2, 2, 2,  # 88 - 8f
+    2, 2, 2, 2, 2, 2, 2, 2,  # 90 - 97
+    2, 2, 2, 2, 2, 2, 2, 2,  # 98 - 9f
+    2, 2, 2, 2, 2, 2, 2, 2,  # a0 - a7
+    2, 2, 2, 2, 2, 2, 2, 2,  # a8 - af
+    2, 2, 2, 2, 2, 2, 2, 2,  # b0 - b7
+    2, 2, 2, 2, 2, 2, 2, 2,  # b8 - bf
+    2, 2, 2, 2, 2, 2, 2, 2,  # c0 - c7
+    2, 2, 2, 2, 2, 2, 2, 2,  # c8 - cf
+    2, 2, 2, 2, 2, 2, 2, 2,  # d0 - d7
+    2, 2, 2, 2, 2, 2, 2, 2,  # d8 - df
+    2, 2, 2, 2, 2, 2, 2, 2,  # e0 - e7
+    2, 2, 2, 2, 2, 2, 2, 2,  # e8 - ef
+    2, 2, 2, 2, 2, 2, 2, 2,  # f0 - f7
+    2, 2, 2, 2, 2, 2, 2, 2,  # f8 - ff
+)
+
+ISO2022KR_ST = (
+    MachineState.START,      3, MachineState.ERROR, MachineState.START, MachineState.START, MachineState.START, MachineState.ERROR, MachineState.ERROR, # 00-07
+    MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, # 08-0f
+    MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR,      4, MachineState.ERROR, MachineState.ERROR, # 10-17
+    MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR,      5, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, # 18-1f
+    MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.START, MachineState.START, MachineState.START, MachineState.START, # 20-27
+)
+# fmt: on
+
+ISO2022KR_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0)
+
+ISO2022KR_SM_MODEL = {
+    "class_table": ISO2022KR_CLS,
+    "class_factor": 6,
+    "state_table": ISO2022KR_ST,
+    "char_len_table": ISO2022KR_CHAR_LEN_TABLE,
+    "name": "ISO-2022-KR",
+    "language": "Korean",
+}
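
Each *_SM_MODEL dict above is consumed by CodingStateMachine.next_state() as
state_table[state * class_factor + class_table[byte]]. A one-transition sketch
with the HZ model:

    from pip._vendor.chardet.enums import MachineState
    from pip._vendor.chardet.escsm import HZ_SM_MODEL

    byte_class = HZ_SM_MODEL["class_table"][0x7E]  # '~' maps to class 2
    next_state = HZ_SM_MODEL["state_table"][
        MachineState.START * HZ_SM_MODEL["class_factor"] + byte_class
    ]
    print(byte_class, next_state)  # 2 3 -> the intermediate "saw ~" state
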
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/eucjpprober.py b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/eucjpprober.py
new file mode 100644
index 0000000..abf2e66
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/eucjpprober.py
@@ -0,0 +1,95 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from .chardistribution import EUCJPDistributionAnalysis
+from .codingstatemachine import CodingStateMachine
+from .enums import MachineState, ProbingState
+from .jpcntx import EUCJPContextAnalysis
+from .mbcharsetprober import MultiByteCharSetProber
+from .mbcssm import EUCJP_SM_MODEL
+
+
+class EUCJPProber(MultiByteCharSetProber):
+    def __init__(self):
+        super().__init__()
+        self.coding_sm = CodingStateMachine(EUCJP_SM_MODEL)
+        self.distribution_analyzer = EUCJPDistributionAnalysis()
+        self.context_analyzer = EUCJPContextAnalysis()
+        self.reset()
+
+    def reset(self):
+        super().reset()
+        self.context_analyzer.reset()
+
+    @property
+    def charset_name(self):
+        return "EUC-JP"
+
+    @property
+    def language(self):
+        return "Japanese"
+
+    def feed(self, byte_str):
+        for i, byte in enumerate(byte_str):
+            # PY3K: byte_str is a byte array, so byte is an int, not a byte
+            coding_state = self.coding_sm.next_state(byte)
+            if coding_state == MachineState.ERROR:
+                self.logger.debug(
+                    "%s %s prober hit error at byte %s",
+                    self.charset_name,
+                    self.language,
+                    i,
+                )
+                self._state = ProbingState.NOT_ME
+                break
+            if coding_state == MachineState.ITS_ME:
+                self._state = ProbingState.FOUND_IT
+                break
+            if coding_state == MachineState.START:
+                char_len = self.coding_sm.get_current_charlen()
+                if i == 0:
+                    self._last_char[1] = byte
+                    self.context_analyzer.feed(self._last_char, char_len)
+                    self.distribution_analyzer.feed(self._last_char, char_len)
+                else:
+                    self.context_analyzer.feed(byte_str[i - 1 : i + 1], char_len)
+                    self.distribution_analyzer.feed(byte_str[i - 1 : i + 1], char_len)
+
+        self._last_char[0] = byte_str[-1]
+
+        if self.state == ProbingState.DETECTING:
+            if self.context_analyzer.got_enough_data() and (
+                self.get_confidence() > self.SHORTCUT_THRESHOLD
+            ):
+                self._state = ProbingState.FOUND_IT
+
+        return self.state
+
+    def get_confidence(self):
+        context_conf = self.context_analyzer.get_confidence()
+        distrib_conf = self.distribution_analyzer.get_confidence()
+        return max(context_conf, distrib_conf)
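
The EUC-JP prober thus combines two independent signals, context analysis and
character-distribution analysis, and reports the more confident of the two. A
minimal usage sketch; the sample text is illustrative and the printed
confidence depends on how much data has been fed:

    from pip._vendor.chardet.eucjpprober import EUCJPProber

    prober = EUCJPProber()
    prober.feed("これは日本語の文章です。".encode("euc_jp"))
    print(prober.charset_name, prober.get_confidence())  # EUC-JP <0.0-1.0>
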
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/euckrfreq.py b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/euckrfreq.py
new file mode 100644
index 0000000..7dc3b10
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/euckrfreq.py
@@ -0,0 +1,196 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+# Sampling from about 20M text materials, including literature and computer technology
+
+# 128  --> 0.79
+# 256  --> 0.92
+# 512  --> 0.986
+# 1024 --> 0.99944
+# 2048 --> 0.99999
+#
+# Ideal Distribution Ratio = 0.98653 / (1 - 0.98653) = 73.24
+# Random Distribution Ratio = 512 / (2350 - 512) = 0.279
+# (a worked sketch of this arithmetic follows the frequency table below)
+#
+# Typical Distribution Ratio
+
+EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0
+
+EUCKR_TABLE_SIZE = 2352
+
+# Char to FreqOrder table
+# fmt: off
+EUCKR_CHAR_TO_FREQ_ORDER = (
+  13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722,  87,
+1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398,
+1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488,  20,1733,1269,1734,
+ 945,1400,1735,  47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739,
+ 116, 987, 813,1401, 683,  75,1204, 145,1740,1741,1742,1743,  16, 847, 667, 622,
+ 708,1744,1745,1746, 966, 787, 304, 129,1747,  60, 820, 123, 676,1748,1749,1750,
+1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856,
+ 344,1763,1764,1765,1766,  89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205,
+ 709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779,
+1780, 337, 751,1058,  28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782,  19,
+1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567,
+1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797,
+1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802,
+1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899,
+ 885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818,
+1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409,
+1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697,
+1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770,
+1412,1837,1838,  39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723,
+ 544,1023,1081, 869,  91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416,
+1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300,
+ 119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083,
+ 893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857,
+1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871,
+ 282,  96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420,
+1421, 268,1877,1422,1878,1879,1880, 308,1881,   2, 537,1882,1883,1215,1884,1885,
+ 127, 791,1886,1273,1423,1887,  34, 336, 404, 643,1888, 571, 654, 894, 840,1889,
+   0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893,
+1894,1123,  48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317,
+1899, 694,1900, 909, 734,1424, 572, 866,1425, 691,  85, 524,1010, 543, 394, 841,
+1901,1902,1903,1026,1904,1905,1906,1907,1908,1909,  30, 451, 651, 988, 310,1910,
+1911,1426, 810,1216,  93,1912,1913,1277,1217,1914, 858, 759,  45,  58, 181, 610,
+ 269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375,
+1919, 359,1920, 687,1921, 822,1922, 293,1923,1924,  40, 662, 118, 692,  29, 939,
+ 887, 640, 482, 174,1925,  69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870,
+ 217, 854,1163, 823,1927,1928,1929,1930, 834,1931,  78,1932, 859,1933,1063,1934,
+1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888,
+1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950,
+1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065,
+1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002,
+1283,1222,1960,1961,1962,1963,  36, 383, 228, 753, 247, 454,1964, 876, 678,1965,
+1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467,
+  50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285,
+ 639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971,   7,
+ 103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979,
+1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985,
+ 818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994,
+1995, 560, 223,1287,  98,   8, 189, 650, 978,1288,1996,1437,1997,  17, 345, 250,
+ 423, 277, 234, 512, 226,  97, 289,  42, 167,1998, 201,1999,2000, 843, 836, 824,
+ 532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003,
+2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008,  71,1440, 745,
+ 619, 688,2009, 829,2010,2011, 147,2012,  33, 948,2013,2014,  74, 224,2015,  61,
+ 191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023,
+2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591,  52, 724, 246,2031,2032,
+2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912,
+2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224,
+ 719,1170, 959, 440, 437, 534,  84, 388, 480,1131, 159, 220, 198, 679,2044,1012,
+ 819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050,
+2051,2052,2053,  59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681,
+ 499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414,
+1444,2064,2065,  41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068,
+2069,1292,2070,2071,1445,2072,1446,2073,2074,  55, 588,  66,1447, 271,1092,2075,
+1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850,
+2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606,
+2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449,
+1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452,
+ 949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112,
+2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121,
+2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130,
+  22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174,  73,1096, 231, 274,
+ 962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139,
+2141,2142,2143,2144,  11, 374, 844,2145, 154,1232,  46,1461,2146, 838, 830, 721,
+1233, 106,2147,  90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298,
+2150,1462, 761, 565,2151, 686,2152, 649,2153,  72, 173,2154, 460, 415,2155,1463,
+2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747,
+2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177,  23, 530, 285,
+2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187,
+2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193,  10,
+2194, 613, 424,2195, 979, 108, 449, 589,  27, 172,  81,1031,  80, 774, 281, 350,
+1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201,
+2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972,
+2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219,
+2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233,
+2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242,
+2243, 521, 486, 548,2244,2245,2246,1473,1300,  53, 549, 137, 875,  76, 158,2247,
+1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178,
+1475,2249,  82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255,
+2256,  18, 450, 206,2257, 290, 292,1142,2258, 511, 162,  99, 346, 164, 735,2259,
+1476,1477,   4, 554, 343, 798,1099,2260,1100,2261,  43, 171,1303, 139, 215,2262,
+2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702,
+1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272,  67,2273,
+ 295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541,
+2282,2283,2284,2285,2286,  70, 852,1071,2287,2288,2289,2290,  21,  56, 509, 117,
+ 432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187,
+2294,1046,1479,2295, 340,2296,  63,1047, 230,2297,2298,1305, 763,1306, 101, 800,
+ 808, 494,2299,2300,2301, 903,2302,  37,1072,  14,   5,2303,  79, 675,2304, 312,
+2305,2306,2307,2308,2309,1480,   6,1307,2310,2311,2312,   1, 470,  35,  24, 229,
+2313, 695, 210,  86, 778,  15, 784, 592, 779,  32,  77, 855, 964,2314, 259,2315,
+ 501, 380,2316,2317,  83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484,
+2320,2321,2322,2323,2324,2325,1485,2326,2327, 128,  57,  68, 261,1048, 211, 170,
+1240,  31,2328,  51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335,
+ 425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601,
+1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395,
+2351,1490,1491,  62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354,
+1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476,
+2361,2362, 332,  12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035,
+ 416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498,
+2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310,
+1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389,
+2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504,
+1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505,
+2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145,
+1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624,
+ 593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700,
+2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221,
+2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377,
+ 644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448,
+ 915, 489,2449,1514,1184,2450,2451, 515,  64, 427, 495,2452, 583,2453, 483, 485,
+1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705,
+1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465,
+ 291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471,
+2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997,
+2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486,
+ 797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187,  65,2494,
+ 434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771,
+ 585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323,
+2499,2500,  49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491,
+  95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510,
+ 161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519,
+2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532,
+2533,  25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199,
+ 704, 504, 468, 758, 657,1528, 196,  44, 839,1246, 272, 750,2543, 765, 862,2544,
+2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247,
+1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441,
+ 249,1075,2556,2557,2558, 466, 743,2559,2560,2561,  92, 514, 426, 420, 526,2562,
+2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362,
+2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583,
+2584,1532,  54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465,
+   3, 458,   9,  38,2588, 107, 110, 890, 209,  26, 737, 498,2589,1534,2590, 431,
+ 202,  88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151,
+ 974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596,
+2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601,  94, 175, 197, 406,
+2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611,
+2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619,
+1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628,
+2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042,
+ 670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642,  # 512, 256
+)
+# fmt: on
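
As promised above, a worked sketch of the distribution-ratio arithmetic from
this file's header comment (reading 2350 as the size of the common Hangul
syllable repertoire is an interpretation, not stated in the source):

    # Reproducing the numbers in the header comment
    coverage_512 = 0.98653                 # text share covered by the 512 most frequent chars
    ideal_ratio = coverage_512 / (1 - coverage_512)
    random_ratio = 512 / (2350 - 512)      # uniform use of a 2350-character repertoire
    print(round(ideal_ratio, 2), round(random_ratio, 3))  # 73.24 0.279
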
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/euckrprober.py b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/euckrprober.py
new file mode 100644
index 0000000..154a6d2
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/euckrprober.py
@@ -0,0 +1,47 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from .chardistribution import EUCKRDistributionAnalysis
+from .codingstatemachine import CodingStateMachine
+from .mbcharsetprober import MultiByteCharSetProber
+from .mbcssm import EUCKR_SM_MODEL
+
+
+class EUCKRProber(MultiByteCharSetProber):
+    def __init__(self):
+        super().__init__()
+        self.coding_sm = CodingStateMachine(EUCKR_SM_MODEL)
+        self.distribution_analyzer = EUCKRDistributionAnalysis()
+        self.reset()
+
+    @property
+    def charset_name(self):
+        return "EUC-KR"
+
+    @property
+    def language(self):
+        return "Korean"
diff --git a/venv/lib/python3.9/site-packages/pip/_vendor/chardet/euctwfreq.py b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/euctwfreq.py
new file mode 100644
index 0000000..4900ccc
--- /dev/null
+++ b/venv/lib/python3.9/site-packages/pip/_vendor/chardet/euctwfreq.py
@@ -0,0 +1,388 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+# EUCTW frequency table
+# Converted from big5 work
+# by Taiwan's Mandarin Promotion Council
+# <http://www.edu.tw:81/mandr/>
+
+# 128  --> 0.42261
+# 256  --> 0.57851
+# 512  --> 0.74851
+# 1024 --> 0.89384
+# 2048 --> 0.97583
+#
+# Ideal Distribution Ratio = 0.74851 / (1 - 0.74851) = 2.98
+# Random Distribution Ratio = 512 / (5401 - 512) = 0.105
+#
+# Typical Distribution Ratio is about 25% of the ideal one, still much higher than RDR
+
+EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75
+
+# Char to FreqOrder table
+EUCTW_TABLE_SIZE = 5376
+
+# fmt: off
+EUCTW_CHAR_TO_FREQ_ORDER = (
+    1, 1800, 1506, 255, 1431, 198, 9, 82, 6, 7310, 177, 202, 3615, 1256, 2808, 110,  # 2742
+    3735, 33, 3241, 261, 76, 44, 2113, 16, 2931, 2184, 1176, 659, 3868, 26, 3404, 2643,  # 2758
+    1198, 3869, 3313, 4060, 410, 2211, 302, 590, 361, 1963, 8, 204, 58, 4296, 7311, 1931,  # 2774
+    63, 7312, 7313, 317, 1614, 75, 222, 159, 4061, 2412, 1480, 7314, 3500, 3068, 224, 2809,  # 2790
+    3616, 3, 10, 3870, 1471, 29, 2774, 1135, 2852, 1939, 873, 130, 3242, 1123, 312, 7315,  # 2806
+    4297, 2051, 507, 252, 682, 7316, 142, 1914, 124, 206, 2932, 34, 3501, 3173, 64, 604,  # 2822
+    7317, 2494, 1976, 1977, 155, 1990, 645, 641, 1606, 7318, 3405, 337, 72, 406, 7319, 80,  # 2838
+    630, 238, 3174, 1509, 263, 939, 1092, 2644, 756, 1440, 1094, 3406, 449, 69, 2969, 591,  # 2854
+    179, 2095, 471, 115, 2034, 1843, 60, 50, 2970, 134, 806, 1868, 734, 2035, 3407, 180,  # 2870
+    995, 1607, 156, 537, 2893, 688, 7320, 319, 1305, 779, 2144, 514, 2374, 298, 4298, 359,  # 2886
+    2495, 90, 2707, 1338, 663, 11, 906, 1099, 2545, 20, 2436, 182, 532, 1716, 7321, 732,  # 2902
+    1376, 4062, 1311, 1420, 3175, 25, 2312, 1056, 113, 399, 382, 1949, 242, 3408, 2467, 529,  # 2918
+    3243, 475, 1447, 3617, 7322, 117, 21, 656, 810, 1297, 2295, 2329, 3502, 7323, 126, 4063,  # 2934
+    706, 456, 150, 613, 4299, 71, 1118, 2036, 4064, 145, 3069, 85, 835, 486, 2114, 1246,  # 2950
+    1426, 428, 727, 1285, 1015, 800, 106, 623, 303, 1281, 7324, 2127, 2354, 347, 3736, 221,  # 2966
+    3503, 3110, 7325, 1955, 1153, 4065, 83, 296, 1199, 3070, 192, 624, 93, 7326, 822, 1897,  # 2982
+    2810, 3111, 795, 2064, 991, 1554, 1542, 1592, 27, 43, 2853, 859, 139, 1456, 860, 4300,  # 2998
+    437, 712, 3871, 164, 2392, 3112, 695, 211, 3017, 2096, 195, 3872, 1608, 3504, 3505, 3618,  # 3014
+    3873, 234, 811, 2971, 2097, 3874, 2229, 1441, 3506, 1615, 2375, 668, 2076, 1638, 305, 228,  # 3030
+    1664, 4301, 467, 415, 7327, 262, 2098, 1593, 239, 108, 300, 200, 1033, 512, 1247, 2077,  # 3046
+    7328, 7329, 2173, 3176, 3619, 2673, 593, 845, 1062, 3244, 88, 1723, 2037, 3875, 1950, 212,  # 3062
+    266, 152, 149, 468, 1898, 4066, 4302, 77, 187, 7330, 3018, 37, 5, 2972, 7331, 3876,  # 3078
+    7332, 7333, 39, 2517, 4303, 2894, 3177, 2078, 55, 148, 74, 4304, 545, 483, 1474, 1029,  # 3094
+    1665, 217, 1869, 1531, 3113, 1104, 2645, 4067, 24, 172, 3507, 900, 3877, 3508, 3509, 4305,  # 3110
+    32, 1408, 2811, 1312, 329, 487, 2355, 2247, 2708, 784, 2674, 4, 3019, 3314, 1427, 1788,  # 3126
+    188, 109, 499, 7334, 3620, 1717, 1789, 888, 1217, 3020, 4306, 7335, 3510, 7336, 3315, 1520,  # 3142
+    3621, 3878, 196, 1034, 775, 7337, 7338, 929, 1815, 249, 439, 38, 7339, 1063, 7340, 794,  # 3158
+    3879, 1435, 2296, 46, 178, 3245, 2065, 7341, 2376, 7342, 214, 1709, 4307, 804, 35, 707,  # 3174
+    324, 3622, 1601, 2546, 140, 459, 4068, 7343, 7344, 1365, 839, 272, 978, 2257, 2572, 3409,  # 3190
+    2128, 1363, 3623, 1423, 697, 100, 3071, 48, 70, 1231, 495, 3114, 2193, 7345, 1294, 7346,  # 3206
+    2079, 462, 586, 1042, 3246, 853, 256, 988, 185, 2377, 3410, 1698, 434, 1084, 7347, 3411,  # 3222
+    314, 2615, 2775, 4308, 2330, 2331, 569, 2280, 637, 1816, 2518, 757, 1162, 1878, 1616, 3412,  # 3238
+    287, 1577, 2115, 768, 4309, 1671, 2854, 3511, 2519, 1321, 3737, 909, 2413, 7348, 4069, 933,  # 3254
+    3738, 7349, 2052, 2356, 1222, 4310, 765, 2414, 1322, 786, 4311, 7350, 1919, 1462, 1677, 2895,  # 3270
+    1699, 7351, 4312, 1424, 2437, 3115, 3624, 2590, 3316, 1774, 1940, 3413, 3880, 4070, 309, 1369,  # 3286
+    1130, 2812, 364, 2230, 1653, 1299, 3881, 3512, 3882, 3883, 2646, 525, 1085, 3021, 902, 2000,  # 3302
+    1475, 964, 4313, 421, 1844, 1415, 1057, 2281, 940, 1364, 3116, 376, 4314, 4315, 1381, 7,  # 3318
+    2520, 983, 2378, 336, 1710, 2675, 1845, 321, 3414, 559, 1131, 3022, 2742, 1808, 1132, 1313,  # 3334
+    265, 1481, 1857, 7352, 352, 1203, 2813, 3247, 167, 1089, 420, 2814, 776, 792, 1724, 3513,  # 3350
+    4071, 2438, 3248, 7353, 4072, 7354, 446, 229, 333, 2743, 901, 3739, 1200, 1557, 4316, 2647,  # 3366
+    1920, 395, 2744, 2676, 3740, 4073, 1835, 125, 916, 3178, 2616, 4317, 7355, 7356, 3741, 7357,  # 3382
+    7358, 7359, 4318, 3117, 3625, 1133, 2547, 1757, 3415, 1510, 2313, 1409, 3514, 7360, 2145, 438,  # 3398
+    2591, 2896, 2379, 3317, 1068, 958, 3023, 461, 311, 2855, 2677, 4074, 1915, 3179, 4075, 1978,  # 3414
+    383, 750, 2745, 2617, 4076, 274, 539, 385, 1278, 1442, 7361, 1154, 1964, 384, 561, 210,  # 3430
+    98, 1295, 2548, 3515, 7362, 1711, 2415, 1482, 3416, 3884, 2897, 1257, 129, 7363, 3742, 642,  # 3446
+    523, 2776, 2777, 2648, 7364, 141, 2231, 1333, 68, 176, 441, 876, 907, 4077, 603, 2592,  # 3462
+    710, 171, 3417, 404, 549, 18, 3118, 2393, 1410, 3626, 1666, 7365, 3516, 4319, 2898, 4320,  # 3478
+    7366, 2973, 368, 7367, 146, 366, 99, 871, 3627, 1543, 748, 807, 1586, 1185, 22, 2258,  # 3494
+    379, 3743, 3180, 7368, 3181, 505, 1941, 2618, 1991, 1382, 2314, 7369, 380, 2357, 218, 702,  # 3510
+    1817, 1248, 3418, 3024, 3517, 3318, 3249, 7370, 2974, 3628, 930, 3250, 3744, 7371, 59, 7372,  # 3526
+    585, 601, 4078, 497, 3419, 1112, 1314, 4321, 1801, 7373, 1223, 1472, 2174, 7374, 749, 1836,  # 3542
+    690, 1899, 3745, 1772, 3885, 1476, 429, 1043, 1790, 2232, 2116, 917, 4079, 447, 1086, 1629,  # 3558
+    7375, 556, 7376, 7377, 2020, 1654, 844, 1090, 105, 550, 966, 1758, 2815, 1008, 1782, 686,  # 3574
+    1095, 7378, 2282, 793, 1602, 7379, 3518, 2593, 4322, 4080, 2933, 2297, 4323, 3746, 980, 2496,  # 3590
+    544, 353, 527, 4324, 908, 2678, 2899, 7380, 381, 2619, 1942, 1348, 7381, 1341, 1252, 560,  # 3606
+    3072, 7382, 3420, 2856, 7383, 2053, 973, 886, 2080, 143, 4325, 7384, 7385, 157, 3886, 496,  # 3622
+    4081, 57, 840, 540, 2038, 4326, 4327, 3421, 2117, 1445, 970, 2259, 1748, 1965, 2081, 4082,  # 3638
+    3119, 1234, 1775, 3251, 2816, 3629, 773, 1206, 2129, 1066, 2039, 1326, 3887, 1738, 1725, 4083,  # 3654
+    279, 3120, 51, 1544, 2594, 423, 1578, 2130, 2066, 173, 4328, 1879, 7386, 7387, 1583, 264,  # 3670
+    610, 3630, 4329, 2439, 280, 154, 7388, 7389, 7390, 1739, 338, 1282, 3073, 693, 2857, 1411,  # 3686
+    1074, 3747, 2440, 7391, 4330, 7392, 7393, 1240, 952, 2394, 7394, 2900, 1538, 2679, 685, 1483,  # 3702
+    4084, 2468, 1436, 953, 4085, 2054, 4331, 671, 2395, 79, 4086, 2441, 3252, 608, 567, 2680,  # 3718
+    3422, 4087, 4088, 1691, 393, 1261, 1791, 2396, 7395, 4332, 7396, 7397, 7398, 7399, 1383, 1672,  # 3734
+    3748, 3182, 1464, 522, 1119, 661, 1150, 216, 675, 4333, 3888, 1432, 3519, 609, 4334, 2681,  # 3750
+    2397, 7400, 7401, 7402, 4089, 3025, 0, 7403, 2469, 315, 231, 2442, 301, 3319, 4335, 2380,  # 3766
+    7404, 233, 4090, 3631, 1818, 4336, 4337, 7405, 96, 1776, 1315, 2082, 7406, 257, 7407, 1809,  # 3782
+    3632, 2709, 1139, 1819, 4091, 2021, 1124, 2163, 2778, 1777, 2649, 7408, 3074, 363, 1655, 3183,  # 3798
+    7409, 2975, 7410, 7411, 7412, 3889, 1567, 3890, 718, 103, 3184, 849, 1443, 341, 3320, 2934,  # 3814
+    1484, 7413, 1712, 127, 67, 339, 4092, 2398, 679, 1412, 821, 7414, 7415, 834, 738, 351,  # 3830
+    2976, 2146, 846, 235, 1497, 1880, 418, 1992, 3749, 2710, 186, 1100, 2147, 2746, 3520, 1545,  # 3846
+    1355, 2935, 2858, 1377, 583, 3891, 4093, 2573, 2977, 7416, 1298, 3633, 1078, 2549, 3634, 2358,  # 3862
+    78, 3750, 3751, 267, 1289, 2099, 2001, 1594, 4094, 348, 369, 1274, 2194, 2175, 1837, 4338,  # 3878
+    1820, 2817, 3635, 2747, 2283, 2002, 4339, 2936, 2748, 144, 3321, 882, 4340, 3892, 2749, 3423,  # 3894
+    4341, 2901, 7417, 4095, 1726, 320, 7418, 3893, 3026, 788, 2978, 7419, 2818, 1773, 1327, 2859,  # 3910
+    3894, 2819, 7420, 1306, 4342, 2003, 1700, 3752, 3521, 2359, 2650, 787, 2022, 506, 824, 3636,  # 3926
+    534, 323, 4343, 1044, 3322, 2023, 1900, 946, 3424, 7421, 1778, 1500, 1678, 7422, 1881, 4344,  # 3942
+    165, 243, 4345, 3637, 2521, 123, 683, 4096, 764, 4346, 36, 3895, 1792, 589, 2902, 816,  # 3958
+    626, 1667, 3027, 2233, 1639, 1555, 1622, 3753, 3896, 7423, 3897, 2860, 1370, 1228, 1932, 891,  # 3974
+    2083, 2903, 304, 4097, 7424, 292, 2979, 2711, 3522, 691, 2100, 4098, 1115, 4347, 118, 662,  # 3990
+    7425, 611, 1156, 854, 2381, 1316, 2861, 2, 386, 515, 2904, 7426, 7427, 3253, 868, 2234,  # 4006
+    1486, 855, 2651, 785, 2212, 3028, 7428, 1040, 3185, 3523, 7429, 3121, 448, 7430, 1525, 7431,  # 4022
+    2164, 4348, 7432, 3754, 7433, 4099, 2820, 3524, 3122, 503, 818, 3898, 3123, 1568, 814, 676,  # 4038
+    1444, 306, 1749, 7434, 3755, 1416, 1030, 197, 1428, 805, 2821, 1501, 4349, 7435, 7436, 7437,  # 4054
+    1993, 7438, 4350, 7439, 7440, 2195, 13, 2779, 3638, 2980, 3124, 1229, 1916, 7441, 3756, 2131,  # 4070
+    7442, 4100, 4351, 2399, 3525, 7443, 2213, 1511, 1727, 1120, 7444, 7445, 646, 3757, 2443, 307,  # 4086
+    7446, 7447, 1595, 3186, 7448, 7449, 7450, 3639, 1113, 1356, 3899, 1465, 2522, 2523, 7451, 519,  # 4102