Upload folder using huggingface_hub
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +217 -0
- Dockerfile +12 -0
- README.md +73 -13
- __pycache__/flowchart.cpython-310.pyc +0 -0
- __pycache__/flowchart.cpython-313.pyc +0 -0
- __pycache__/flowchart.cpython-39.pyc +0 -0
- __pycache__/prompts.cpython-310.pyc +0 -0
- __pycache__/prompts.cpython-313.pyc +0 -0
- __pycache__/prompts.cpython-39.pyc +0 -0
- architecture.png +3 -0
- flowchart.py +83 -0
- latex.txt +129 -0
- main.py +178 -0
- prompts.py +206 -0
- requirements.txt +10 -0
- venv/.gitignore +2 -0
- venv/bin/Activate.ps1 +248 -0
- venv/bin/activate +76 -0
- venv/bin/activate.csh +27 -0
- venv/bin/activate.fish +69 -0
- venv/bin/distro +8 -0
- venv/bin/docx2txt +10 -0
- venv/bin/f2py +8 -0
- venv/bin/httpx +8 -0
- venv/bin/jsonschema +8 -0
- venv/bin/markdown-it +8 -0
- venv/bin/normalizer +8 -0
- venv/bin/numpy-config +8 -0
- venv/bin/pip +8 -0
- venv/bin/pip3 +8 -0
- venv/bin/pip3.13 +8 -0
- venv/bin/pygmentize +8 -0
- venv/bin/python +0 -0
- venv/bin/python3 +0 -0
- venv/bin/python3.13 +0 -0
- venv/bin/streamlit +8 -0
- venv/bin/streamlit.cmd +16 -0
- venv/etc/jupyter/nbconfig/notebook.d/pydeck.json +5 -0
- venv/lib/python3.13/site-packages/GitPython-3.1.44.dist-info/AUTHORS +59 -0
- venv/lib/python3.13/site-packages/GitPython-3.1.44.dist-info/INSTALLER +1 -0
- venv/lib/python3.13/site-packages/GitPython-3.1.44.dist-info/LICENSE +29 -0
- venv/lib/python3.13/site-packages/GitPython-3.1.44.dist-info/METADATA +295 -0
- venv/lib/python3.13/site-packages/GitPython-3.1.44.dist-info/RECORD +82 -0
- venv/lib/python3.13/site-packages/GitPython-3.1.44.dist-info/WHEEL +5 -0
- venv/lib/python3.13/site-packages/GitPython-3.1.44.dist-info/top_level.txt +1 -0
- venv/lib/python3.13/site-packages/MarkupSafe-3.0.2.dist-info/INSTALLER +1 -0
- venv/lib/python3.13/site-packages/MarkupSafe-3.0.2.dist-info/LICENSE.txt +28 -0
- venv/lib/python3.13/site-packages/MarkupSafe-3.0.2.dist-info/METADATA +92 -0
- venv/lib/python3.13/site-packages/MarkupSafe-3.0.2.dist-info/RECORD +14 -0
- venv/lib/python3.13/site-packages/MarkupSafe-3.0.2.dist-info/WHEEL +5 -0
.gitattributes
CHANGED
@@ -33,3 +33,220 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
|
33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
36 |
+
architecture.png filter=lfs diff=lfs merge=lfs -text
|
37 |
+
venv/lib/python3.13/site-packages/PIL/.dylibs/libbrotlicommon.1.1.0.dylib filter=lfs diff=lfs merge=lfs -text
|
38 |
+
venv/lib/python3.13/site-packages/PIL/.dylibs/libbrotlidec.1.1.0.dylib filter=lfs diff=lfs merge=lfs -text
|
39 |
+
venv/lib/python3.13/site-packages/PIL/.dylibs/libfreetype.6.dylib filter=lfs diff=lfs merge=lfs -text
|
40 |
+
venv/lib/python3.13/site-packages/PIL/.dylibs/libharfbuzz.0.dylib filter=lfs diff=lfs merge=lfs -text
|
41 |
+
venv/lib/python3.13/site-packages/PIL/.dylibs/libjpeg.62.4.0.dylib filter=lfs diff=lfs merge=lfs -text
|
42 |
+
venv/lib/python3.13/site-packages/PIL/.dylibs/liblcms2.2.dylib filter=lfs diff=lfs merge=lfs -text
|
43 |
+
venv/lib/python3.13/site-packages/PIL/.dylibs/liblzma.5.dylib filter=lfs diff=lfs merge=lfs -text
|
44 |
+
venv/lib/python3.13/site-packages/PIL/.dylibs/libopenjp2.2.5.3.dylib filter=lfs diff=lfs merge=lfs -text
|
45 |
+
venv/lib/python3.13/site-packages/PIL/.dylibs/libpng16.16.dylib filter=lfs diff=lfs merge=lfs -text
|
46 |
+
venv/lib/python3.13/site-packages/PIL/.dylibs/libtiff.6.dylib filter=lfs diff=lfs merge=lfs -text
|
47 |
+
venv/lib/python3.13/site-packages/PIL/.dylibs/libwebp.7.dylib filter=lfs diff=lfs merge=lfs -text
|
48 |
+
venv/lib/python3.13/site-packages/PIL/.dylibs/libwebpmux.3.dylib filter=lfs diff=lfs merge=lfs -text
|
49 |
+
venv/lib/python3.13/site-packages/PIL/.dylibs/libxcb.1.1.0.dylib filter=lfs diff=lfs merge=lfs -text
|
50 |
+
venv/lib/python3.13/site-packages/PIL/.dylibs/libz.1.3.1.zlib-ng.dylib filter=lfs diff=lfs merge=lfs -text
|
51 |
+
venv/lib/python3.13/site-packages/PIL/__pycache__/Image.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
52 |
+
venv/lib/python3.13/site-packages/PIL/__pycache__/TiffImagePlugin.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
53 |
+
venv/lib/python3.13/site-packages/PIL/_imaging.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
54 |
+
venv/lib/python3.13/site-packages/PIL/_imagingft.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
55 |
+
venv/lib/python3.13/site-packages/__pycache__/typing_extensions.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
56 |
+
venv/lib/python3.13/site-packages/altair/vegalite/v5/__pycache__/api.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
57 |
+
venv/lib/python3.13/site-packages/altair/vegalite/v5/schema/__pycache__/_config.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
58 |
+
venv/lib/python3.13/site-packages/altair/vegalite/v5/schema/__pycache__/channels.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
59 |
+
venv/lib/python3.13/site-packages/altair/vegalite/v5/schema/__pycache__/core.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
60 |
+
venv/lib/python3.13/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
61 |
+
venv/lib/python3.13/site-packages/charset_normalizer/md.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
62 |
+
venv/lib/python3.13/site-packages/charset_normalizer/md__mypyc.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
63 |
+
venv/lib/python3.13/site-packages/click/__pycache__/core.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
64 |
+
venv/lib/python3.13/site-packages/google/_upb/_message.abi3.so filter=lfs diff=lfs merge=lfs -text
|
65 |
+
venv/lib/python3.13/site-packages/google/protobuf/__pycache__/descriptor_pb2.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
66 |
+
venv/lib/python3.13/site-packages/idna/__pycache__/uts46data.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
67 |
+
venv/lib/python3.13/site-packages/jinja2/__pycache__/compiler.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
68 |
+
venv/lib/python3.13/site-packages/jsonschema/tests/__pycache__/test_validators.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
69 |
+
venv/lib/python3.13/site-packages/lxml/_elementpath.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
70 |
+
venv/lib/python3.13/site-packages/lxml/builder.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
71 |
+
venv/lib/python3.13/site-packages/lxml/etree.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
72 |
+
venv/lib/python3.13/site-packages/lxml/html/diff.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
73 |
+
venv/lib/python3.13/site-packages/lxml/objectify.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
74 |
+
venv/lib/python3.13/site-packages/lxml/sax.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
75 |
+
venv/lib/python3.13/site-packages/narwhals/__pycache__/dataframe.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
76 |
+
venv/lib/python3.13/site-packages/narwhals/__pycache__/expr.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
77 |
+
venv/lib/python3.13/site-packages/narwhals/__pycache__/series.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
78 |
+
venv/lib/python3.13/site-packages/numpy/_core/__pycache__/_add_newdocs.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
79 |
+
venv/lib/python3.13/site-packages/numpy/_core/__pycache__/fromnumeric.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
80 |
+
venv/lib/python3.13/site-packages/numpy/_core/_multiarray_tests.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
81 |
+
venv/lib/python3.13/site-packages/numpy/_core/_multiarray_umath.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
82 |
+
venv/lib/python3.13/site-packages/numpy/_core/_simd.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
83 |
+
venv/lib/python3.13/site-packages/numpy/_core/tests/__pycache__/test_datetime.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
84 |
+
venv/lib/python3.13/site-packages/numpy/_core/tests/__pycache__/test_dtype.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
85 |
+
venv/lib/python3.13/site-packages/numpy/_core/tests/__pycache__/test_multiarray.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
86 |
+
venv/lib/python3.13/site-packages/numpy/_core/tests/__pycache__/test_nditer.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
87 |
+
venv/lib/python3.13/site-packages/numpy/_core/tests/__pycache__/test_numeric.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
88 |
+
venv/lib/python3.13/site-packages/numpy/_core/tests/__pycache__/test_regression.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
89 |
+
venv/lib/python3.13/site-packages/numpy/_core/tests/__pycache__/test_ufunc.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
90 |
+
venv/lib/python3.13/site-packages/numpy/_core/tests/__pycache__/test_umath.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
91 |
+
venv/lib/python3.13/site-packages/numpy/f2py/__pycache__/crackfortran.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
92 |
+
venv/lib/python3.13/site-packages/numpy/fft/_pocketfft_umath.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
93 |
+
venv/lib/python3.13/site-packages/numpy/lib/__pycache__/_function_base_impl.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
94 |
+
venv/lib/python3.13/site-packages/numpy/lib/tests/__pycache__/test_function_base.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
95 |
+
venv/lib/python3.13/site-packages/numpy/lib/tests/__pycache__/test_io.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
96 |
+
venv/lib/python3.13/site-packages/numpy/linalg/__pycache__/_linalg.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
97 |
+
venv/lib/python3.13/site-packages/numpy/linalg/_umath_linalg.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
98 |
+
venv/lib/python3.13/site-packages/numpy/linalg/tests/__pycache__/test_linalg.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
99 |
+
venv/lib/python3.13/site-packages/numpy/ma/__pycache__/core.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
100 |
+
venv/lib/python3.13/site-packages/numpy/ma/tests/__pycache__/test_core.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
101 |
+
venv/lib/python3.13/site-packages/numpy/ma/tests/__pycache__/test_extras.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
102 |
+
venv/lib/python3.13/site-packages/numpy/random/_bounded_integers.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
103 |
+
venv/lib/python3.13/site-packages/numpy/random/_common.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
104 |
+
venv/lib/python3.13/site-packages/numpy/random/_generator.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
105 |
+
venv/lib/python3.13/site-packages/numpy/random/_mt19937.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
106 |
+
venv/lib/python3.13/site-packages/numpy/random/_pcg64.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
107 |
+
venv/lib/python3.13/site-packages/numpy/random/_philox.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
108 |
+
venv/lib/python3.13/site-packages/numpy/random/bit_generator.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
109 |
+
venv/lib/python3.13/site-packages/numpy/random/mtrand.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
110 |
+
venv/lib/python3.13/site-packages/numpy/random/tests/__pycache__/test_generator_mt19937.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
111 |
+
venv/lib/python3.13/site-packages/numpy/random/tests/__pycache__/test_random.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
112 |
+
venv/lib/python3.13/site-packages/numpy/random/tests/__pycache__/test_randomstate.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
113 |
+
venv/lib/python3.13/site-packages/numpy/testing/_private/__pycache__/utils.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
114 |
+
venv/lib/python3.13/site-packages/numpy/testing/tests/__pycache__/test_utils.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
115 |
+
venv/lib/python3.13/site-packages/pandas/_libs/algos.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
116 |
+
venv/lib/python3.13/site-packages/pandas/_libs/arrays.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
117 |
+
venv/lib/python3.13/site-packages/pandas/_libs/groupby.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
118 |
+
venv/lib/python3.13/site-packages/pandas/_libs/hashing.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
119 |
+
venv/lib/python3.13/site-packages/pandas/_libs/hashtable.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
120 |
+
venv/lib/python3.13/site-packages/pandas/_libs/index.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
121 |
+
venv/lib/python3.13/site-packages/pandas/_libs/internals.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
122 |
+
venv/lib/python3.13/site-packages/pandas/_libs/interval.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
123 |
+
venv/lib/python3.13/site-packages/pandas/_libs/join.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
124 |
+
venv/lib/python3.13/site-packages/pandas/_libs/lib.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
125 |
+
venv/lib/python3.13/site-packages/pandas/_libs/missing.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
126 |
+
venv/lib/python3.13/site-packages/pandas/_libs/ops.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
127 |
+
venv/lib/python3.13/site-packages/pandas/_libs/parsers.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
128 |
+
venv/lib/python3.13/site-packages/pandas/_libs/reshape.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
129 |
+
venv/lib/python3.13/site-packages/pandas/_libs/sas.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
130 |
+
venv/lib/python3.13/site-packages/pandas/_libs/sparse.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
131 |
+
venv/lib/python3.13/site-packages/pandas/_libs/testing.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
132 |
+
venv/lib/python3.13/site-packages/pandas/_libs/tslib.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
133 |
+
venv/lib/python3.13/site-packages/pandas/_libs/tslibs/ccalendar.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
134 |
+
venv/lib/python3.13/site-packages/pandas/_libs/tslibs/conversion.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
135 |
+
venv/lib/python3.13/site-packages/pandas/_libs/tslibs/dtypes.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
136 |
+
venv/lib/python3.13/site-packages/pandas/_libs/tslibs/fields.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
137 |
+
venv/lib/python3.13/site-packages/pandas/_libs/tslibs/nattype.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
138 |
+
venv/lib/python3.13/site-packages/pandas/_libs/tslibs/np_datetime.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
139 |
+
venv/lib/python3.13/site-packages/pandas/_libs/tslibs/offsets.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
140 |
+
venv/lib/python3.13/site-packages/pandas/_libs/tslibs/parsing.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
141 |
+
venv/lib/python3.13/site-packages/pandas/_libs/tslibs/period.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
142 |
+
venv/lib/python3.13/site-packages/pandas/_libs/tslibs/strptime.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
143 |
+
venv/lib/python3.13/site-packages/pandas/_libs/tslibs/timedeltas.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
144 |
+
venv/lib/python3.13/site-packages/pandas/_libs/tslibs/timestamps.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
145 |
+
venv/lib/python3.13/site-packages/pandas/_libs/tslibs/timezones.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
146 |
+
venv/lib/python3.13/site-packages/pandas/_libs/tslibs/tzconversion.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
147 |
+
venv/lib/python3.13/site-packages/pandas/_libs/tslibs/vectorized.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
148 |
+
venv/lib/python3.13/site-packages/pandas/_libs/window/aggregations.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
149 |
+
venv/lib/python3.13/site-packages/pandas/_libs/window/indexers.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
150 |
+
venv/lib/python3.13/site-packages/pandas/_libs/writers.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
151 |
+
venv/lib/python3.13/site-packages/pandas/core/__pycache__/frame.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
152 |
+
venv/lib/python3.13/site-packages/pandas/core/__pycache__/generic.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
153 |
+
venv/lib/python3.13/site-packages/pandas/core/__pycache__/indexing.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
154 |
+
venv/lib/python3.13/site-packages/pandas/core/__pycache__/series.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
155 |
+
venv/lib/python3.13/site-packages/pandas/core/arrays/__pycache__/categorical.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
156 |
+
venv/lib/python3.13/site-packages/pandas/core/arrays/arrow/__pycache__/array.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
157 |
+
venv/lib/python3.13/site-packages/pandas/core/groupby/__pycache__/groupby.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
158 |
+
venv/lib/python3.13/site-packages/pandas/core/indexes/__pycache__/base.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
159 |
+
venv/lib/python3.13/site-packages/pandas/core/indexes/__pycache__/multi.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
160 |
+
venv/lib/python3.13/site-packages/pandas/core/strings/__pycache__/accessor.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
161 |
+
venv/lib/python3.13/site-packages/pandas/io/__pycache__/pytables.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
162 |
+
venv/lib/python3.13/site-packages/pandas/io/__pycache__/sql.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
163 |
+
venv/lib/python3.13/site-packages/pandas/io/__pycache__/stata.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
164 |
+
venv/lib/python3.13/site-packages/pandas/io/formats/__pycache__/style.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
165 |
+
venv/lib/python3.13/site-packages/pandas/tests/__pycache__/test_algos.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
166 |
+
venv/lib/python3.13/site-packages/pandas/tests/arithmetic/__pycache__/test_datetime64.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
167 |
+
venv/lib/python3.13/site-packages/pandas/tests/arithmetic/__pycache__/test_timedelta64.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
168 |
+
venv/lib/python3.13/site-packages/pandas/tests/computation/__pycache__/test_eval.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
169 |
+
venv/lib/python3.13/site-packages/pandas/tests/copy_view/__pycache__/test_methods.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
170 |
+
venv/lib/python3.13/site-packages/pandas/tests/dtypes/__pycache__/test_inference.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
171 |
+
venv/lib/python3.13/site-packages/pandas/tests/extension/__pycache__/test_arrow.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
172 |
+
venv/lib/python3.13/site-packages/pandas/tests/frame/__pycache__/test_arithmetic.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
173 |
+
venv/lib/python3.13/site-packages/pandas/tests/frame/__pycache__/test_constructors.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
174 |
+
venv/lib/python3.13/site-packages/pandas/tests/frame/__pycache__/test_reductions.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
175 |
+
venv/lib/python3.13/site-packages/pandas/tests/frame/__pycache__/test_stack_unstack.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
176 |
+
venv/lib/python3.13/site-packages/pandas/tests/frame/indexing/__pycache__/test_indexing.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
177 |
+
venv/lib/python3.13/site-packages/pandas/tests/groupby/__pycache__/test_groupby.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
178 |
+
venv/lib/python3.13/site-packages/pandas/tests/indexing/__pycache__/test_loc.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
179 |
+
venv/lib/python3.13/site-packages/pandas/tests/io/__pycache__/test_sql.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
180 |
+
venv/lib/python3.13/site-packages/pandas/tests/io/__pycache__/test_stata.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
181 |
+
venv/lib/python3.13/site-packages/pandas/tests/io/formats/__pycache__/test_format.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
182 |
+
venv/lib/python3.13/site-packages/pandas/tests/io/json/__pycache__/test_pandas.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
183 |
+
venv/lib/python3.13/site-packages/pandas/tests/plotting/__pycache__/test_datetimelike.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
184 |
+
venv/lib/python3.13/site-packages/pandas/tests/plotting/frame/__pycache__/test_frame.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
185 |
+
venv/lib/python3.13/site-packages/pandas/tests/resample/__pycache__/test_datetime_index.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
186 |
+
venv/lib/python3.13/site-packages/pandas/tests/reshape/__pycache__/test_pivot.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
187 |
+
venv/lib/python3.13/site-packages/pandas/tests/reshape/merge/__pycache__/test_merge.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
188 |
+
venv/lib/python3.13/site-packages/pandas/tests/series/__pycache__/test_constructors.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
189 |
+
venv/lib/python3.13/site-packages/pandas/tests/tools/__pycache__/test_to_datetime.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
190 |
+
venv/lib/python3.13/site-packages/pip/_vendor/__pycache__/typing_extensions.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
191 |
+
venv/lib/python3.13/site-packages/pip/_vendor/distlib/t64-arm.exe filter=lfs diff=lfs merge=lfs -text
|
192 |
+
venv/lib/python3.13/site-packages/pip/_vendor/distlib/t64.exe filter=lfs diff=lfs merge=lfs -text
|
193 |
+
venv/lib/python3.13/site-packages/pip/_vendor/distlib/w64-arm.exe filter=lfs diff=lfs merge=lfs -text
|
194 |
+
venv/lib/python3.13/site-packages/pip/_vendor/distlib/w64.exe filter=lfs diff=lfs merge=lfs -text
|
195 |
+
venv/lib/python3.13/site-packages/pip/_vendor/idna/__pycache__/uts46data.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
196 |
+
venv/lib/python3.13/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
197 |
+
venv/lib/python3.13/site-packages/pip/_vendor/rich/__pycache__/_emoji_codes.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
198 |
+
venv/lib/python3.13/site-packages/pip/_vendor/rich/__pycache__/console.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
199 |
+
venv/lib/python3.13/site-packages/pyarrow/_acero.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
200 |
+
venv/lib/python3.13/site-packages/pyarrow/_azurefs.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
201 |
+
venv/lib/python3.13/site-packages/pyarrow/_compute.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
202 |
+
venv/lib/python3.13/site-packages/pyarrow/_csv.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
203 |
+
venv/lib/python3.13/site-packages/pyarrow/_dataset.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
204 |
+
venv/lib/python3.13/site-packages/pyarrow/_dataset_orc.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
205 |
+
venv/lib/python3.13/site-packages/pyarrow/_dataset_parquet.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
206 |
+
venv/lib/python3.13/site-packages/pyarrow/_dataset_parquet_encryption.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
207 |
+
venv/lib/python3.13/site-packages/pyarrow/_feather.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
208 |
+
venv/lib/python3.13/site-packages/pyarrow/_flight.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
209 |
+
venv/lib/python3.13/site-packages/pyarrow/_fs.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
210 |
+
venv/lib/python3.13/site-packages/pyarrow/_gcsfs.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
211 |
+
venv/lib/python3.13/site-packages/pyarrow/_hdfs.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
212 |
+
venv/lib/python3.13/site-packages/pyarrow/_json.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
213 |
+
venv/lib/python3.13/site-packages/pyarrow/_orc.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
214 |
+
venv/lib/python3.13/site-packages/pyarrow/_parquet.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
215 |
+
venv/lib/python3.13/site-packages/pyarrow/_parquet_encryption.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
216 |
+
venv/lib/python3.13/site-packages/pyarrow/_pyarrow_cpp_tests.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
217 |
+
venv/lib/python3.13/site-packages/pyarrow/_s3fs.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
218 |
+
venv/lib/python3.13/site-packages/pyarrow/_substrait.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
219 |
+
venv/lib/python3.13/site-packages/pyarrow/lib.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
220 |
+
venv/lib/python3.13/site-packages/pyarrow/libarrow.1801.dylib filter=lfs diff=lfs merge=lfs -text
|
221 |
+
venv/lib/python3.13/site-packages/pyarrow/libarrow_acero.1801.dylib filter=lfs diff=lfs merge=lfs -text
|
222 |
+
venv/lib/python3.13/site-packages/pyarrow/libarrow_dataset.1801.dylib filter=lfs diff=lfs merge=lfs -text
|
223 |
+
venv/lib/python3.13/site-packages/pyarrow/libarrow_flight.1801.dylib filter=lfs diff=lfs merge=lfs -text
|
224 |
+
venv/lib/python3.13/site-packages/pyarrow/libarrow_python.dylib filter=lfs diff=lfs merge=lfs -text
|
225 |
+
venv/lib/python3.13/site-packages/pyarrow/libarrow_python_flight.dylib filter=lfs diff=lfs merge=lfs -text
|
226 |
+
venv/lib/python3.13/site-packages/pyarrow/libarrow_substrait.1801.dylib filter=lfs diff=lfs merge=lfs -text
|
227 |
+
venv/lib/python3.13/site-packages/pyarrow/libparquet.1801.dylib filter=lfs diff=lfs merge=lfs -text
|
228 |
+
venv/lib/python3.13/site-packages/pyarrow/tests/__pycache__/test_array.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
229 |
+
venv/lib/python3.13/site-packages/pyarrow/tests/__pycache__/test_compute.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
230 |
+
venv/lib/python3.13/site-packages/pyarrow/tests/__pycache__/test_convert_builtin.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
231 |
+
venv/lib/python3.13/site-packages/pyarrow/tests/__pycache__/test_csv.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
232 |
+
venv/lib/python3.13/site-packages/pyarrow/tests/__pycache__/test_dataset.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
233 |
+
venv/lib/python3.13/site-packages/pyarrow/tests/__pycache__/test_extension_type.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
234 |
+
venv/lib/python3.13/site-packages/pyarrow/tests/__pycache__/test_flight.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
235 |
+
venv/lib/python3.13/site-packages/pyarrow/tests/__pycache__/test_io.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
236 |
+
venv/lib/python3.13/site-packages/pyarrow/tests/__pycache__/test_pandas.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
237 |
+
venv/lib/python3.13/site-packages/pyarrow/tests/__pycache__/test_table.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
238 |
+
venv/lib/python3.13/site-packages/pydantic/__pycache__/json_schema.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
239 |
+
venv/lib/python3.13/site-packages/pydantic/_internal/__pycache__/_generate_schema.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
240 |
+
venv/lib/python3.13/site-packages/pydantic_core/__pycache__/core_schema.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
241 |
+
venv/lib/python3.13/site-packages/pydantic_core/_pydantic_core.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
242 |
+
venv/lib/python3.13/site-packages/pydeck/nbextension/static/index.js.map filter=lfs diff=lfs merge=lfs -text
|
243 |
+
venv/lib/python3.13/site-packages/pygments/lexers/__pycache__/lisp.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
244 |
+
venv/lib/python3.13/site-packages/rich/__pycache__/_emoji_codes.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
245 |
+
venv/lib/python3.13/site-packages/rich/__pycache__/console.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
246 |
+
venv/lib/python3.13/site-packages/rpds/rpds.cpython-313-darwin.so filter=lfs diff=lfs merge=lfs -text
|
247 |
+
venv/lib/python3.13/site-packages/streamlit/static/static/media/MaterialSymbols-Rounded.MSqyuJUI.woff2 filter=lfs diff=lfs merge=lfs -text
|
248 |
+
venv/lib/python3.13/site-packages/streamlit/static/static/media/fireworks.B4d-_KUe.gif filter=lfs diff=lfs merge=lfs -text
|
249 |
+
venv/lib/python3.13/site-packages/tornado/__pycache__/web.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
250 |
+
venv/lib/python3.13/site-packages/tornado/speedups.abi3.so filter=lfs diff=lfs merge=lfs -text
|
251 |
+
venv/lib/python3.13/site-packages/tornado/test/__pycache__/web_test.cpython-313.pyc filter=lfs diff=lfs merge=lfs -text
|
252 |
+
venv/share/jupyter/nbextensions/pydeck/index.js.map filter=lfs diff=lfs merge=lfs -text
|
Dockerfile
ADDED
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
FROM python:3.10-slim
|
2 |
+
|
3 |
+
WORKDIR /app
|
4 |
+
|
5 |
+
COPY requirements.txt .
|
6 |
+
RUN pip install --no-cache-dir -r requirements.txt
|
7 |
+
|
8 |
+
COPY . .
|
9 |
+
|
10 |
+
EXPOSE 8501
|
11 |
+
|
12 |
+
CMD ["streamlit", "run", "main.py"]
|
README.md
CHANGED
@@ -1,13 +1,73 @@
|
|
1 |
-
|
2 |
-
|
3 |
-
|
4 |
-
|
5 |
-
|
6 |
-
|
7 |
-
|
8 |
-
|
9 |
-
|
10 |
-
|
11 |
-
|
12 |
-
|
13 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# AI-Powered Resume Analyzer and Enhancer
|
2 |
+
This application is an AI-powered tool that analyzes and enhances resumes based on job descriptions. It uses advanced language models to provide detailed insights and improvements for job seekers.
|
3 |
+
|
4 |
+
|
5 |
+
|
6 |
+
<p align="center"> <img src="architecture.png" alt="Image generated using Claude AI"> <br> <em>Image generated using Claude AI</em> </p>
|
7 |
+
|
8 |
+
## Features
|
9 |
+
|
10 |
+
- Resume upload (DOCX format)
|
11 |
+
- Job description input
|
12 |
+
- Quick and in-depth resume analysis
|
13 |
+
- Resume enhancement
|
14 |
+
- Output in multiple formats (DOCX, HTML)
|
15 |
+
|
16 |
+
## Architecture
|
17 |
+
|
18 |
+
The application follows this high-level flow:
|
19 |
+
|
20 |
+
1. Start Application
|
21 |
+
2. Upload DOCX Resume
|
22 |
+
3. Input Job Description
|
23 |
+
4. Input GROQ API Key
|
24 |
+
5. Choose Action (Analyze or Enhance)
|
25 |
+
6. If Analyze:
|
26 |
+
- Choose Analysis Type (Quick or In-Depth)
|
27 |
+
- Perform Analysis
|
28 |
+
- Display Results
|
29 |
+
7. If Enhance:
|
30 |
+
- Perform In-Depth Analysis
|
31 |
+
- Enhance Resume
|
32 |
+
- Generate Enhanced Outputs
|
33 |
+
|
34 |
+
## Key Components
|
35 |
+
|
36 |
+
- **Streamlit**: For the web interface
|
37 |
+
- **python-docx**: To process DOCX files
|
38 |
+
- **GROQ API**: For accessing AI models
|
39 |
+
- **LLaMA 3 70B**: Large language model for analysis and enhancement
|
40 |
+
- **Graphviz**: For generating the architecture diagram
|
41 |
+
|
42 |
+
## Setup and Installation
|
43 |
+
|
44 |
+
1. Clone the repository
|
45 |
+
2. Install required packages: `pip install - r requirements.txt`
|
46 |
+
3. Set up a GROQ API account and obtain an API key from here (https://console.groq.com/keys?_gl=1*1ozbol6*_gcl_au*MTc1ODk5MDQ0Mi4xNzM2NTgwNTgx*_ga*NDM2OTA5NjI1LjE3MzY1ODA1ODA.*_ga_4TD0X2GEZG*MTczNjU4MDU4MC4xLjAuMTczNjU4MDU4MC42MC4wLjA.)
|
47 |
+
|
48 |
+
## Usage
|
49 |
+
|
50 |
+
1. Run the Streamlit app: `streamlit run main.py`
|
51 |
+
2. Upload your resume (DOCX format)
|
52 |
+
3. Enter the job description
|
53 |
+
4. Provide your GROQ API key
|
54 |
+
5. Choose to analyze or enhance your resume
|
55 |
+
6. View the results or download the enhanced resume
|
56 |
+
|
57 |
+
## Modules
|
58 |
+
|
59 |
+
- `main.py`: Main Streamlit application
|
60 |
+
- `prompts.py`: Prompts for handling resume parsing and formatting
|
61 |
+
- `flowhchart.py`: Flow chart vizualization
|
62 |
+
|
63 |
+
## Dependencies
|
64 |
+
|
65 |
+
- Streamlit
|
66 |
+
- python-docx
|
67 |
+
- groq
|
68 |
+
- graphviz
|
69 |
+
- docx2txt
|
70 |
+
|
71 |
+
## Note
|
72 |
+
|
73 |
+
Ensure you have a valid GROQ API key and sufficient credits for using the LLaMA 3 70B model. The application's performance depends on the quality and availability of the AI model.
|
__pycache__/flowchart.cpython-310.pyc
ADDED
Binary file (2.4 kB). View file
|
|
__pycache__/flowchart.cpython-313.pyc
ADDED
Binary file (3.52 kB). View file
|
|
__pycache__/flowchart.cpython-39.pyc
ADDED
Binary file (2.37 kB). View file
|
|
__pycache__/prompts.cpython-310.pyc
ADDED
Binary file (8.14 kB). View file
|
|
__pycache__/prompts.cpython-313.pyc
ADDED
Binary file (7.97 kB). View file
|
|
__pycache__/prompts.cpython-39.pyc
ADDED
Binary file (8.07 kB). View file
|
|
architecture.png
ADDED
![]() |
Git LFS Details
|
flowchart.py
ADDED
@@ -0,0 +1,83 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import graphviz
|
2 |
+
import streamlit as st
|
3 |
+
|
4 |
+
def display_architecture_diagram():
|
5 |
+
# Create a directed graph
|
6 |
+
graph = graphviz.Digraph(format='png')
|
7 |
+
|
8 |
+
# Define node styles
|
9 |
+
styles = {
|
10 |
+
'start': {'shape': 'cylinder', 'style': 'filled', 'fillcolor': '#22c55e', 'fontcolor': 'white'},
|
11 |
+
'input': {'shape': 'box', 'style': 'filled', 'fillcolor': '#f97316', 'fontcolor': 'white'},
|
12 |
+
'decision': {'shape': 'diamond', 'style': 'filled', 'fillcolor': '#0ea5e9', 'fontcolor': 'white'},
|
13 |
+
'process': {'shape': 'box', 'style': 'filled', 'fillcolor': '#8b5cf6', 'fontcolor': 'white'},
|
14 |
+
'output': {'shape': 'box', 'style': 'filled', 'fillcolor': '#ec4899', 'fontcolor': 'white'}
|
15 |
+
}
|
16 |
+
|
17 |
+
# Add nodes
|
18 |
+
graph.node('Start', '🚀 Start\nApplication', **styles['start'])
|
19 |
+
graph.node('Upload', '📄 Upload DOCX\nResume', **styles['input'])
|
20 |
+
graph.node('JobDesc', '💼 Input Job\nDescription', **styles['input'])
|
21 |
+
graph.node('ApiKey', '🔑 Input GROQ\nAPI Key', **styles['input'])
|
22 |
+
graph.node('ChooseAction', '🔄 Choose\nAction', **styles['decision'])
|
23 |
+
graph.node('AnalysisType', '📊 Analysis\nType', **styles['decision'])
|
24 |
+
|
25 |
+
# Add process nodes with detailed content
|
26 |
+
quick_analysis = """Quick Analysis
|
27 |
+
──────────────
|
28 |
+
• Skills Match Rating
|
29 |
+
• Experience Alignment
|
30 |
+
• Pros and Cons
|
31 |
+
• Match Percentage"""
|
32 |
+
graph.node('QuickAnalysis', quick_analysis, **styles['process'])
|
33 |
+
|
34 |
+
in_depth = """In-Depth Analysisc
|
35 |
+
──────────────
|
36 |
+
• Comprehensive Skill Gap
|
37 |
+
• Detailed Experience Review
|
38 |
+
• Career Path Alignment
|
39 |
+
• Strategic Recommendations"""
|
40 |
+
graph.node('InDepthAnalysis', in_depth, **styles['process'])
|
41 |
+
|
42 |
+
enhancement = """Resume Enhancement
|
43 |
+
──────────────
|
44 |
+
• In-Depth Analysis
|
45 |
+
• Format Optimization
|
46 |
+
• Content Improvement
|
47 |
+
• Design Enhancement"""
|
48 |
+
graph.node('Enhancement', enhancement, **styles['process'])
|
49 |
+
|
50 |
+
# Add output nodes
|
51 |
+
results = """Analysis Results
|
52 |
+
──────────────
|
53 |
+
📊 Analysis Summary
|
54 |
+
📝 Recommendations
|
55 |
+
✅ Action Items"""
|
56 |
+
graph.node('Results', results, **styles['output'])
|
57 |
+
|
58 |
+
enhanced = """Enhanced Resume
|
59 |
+
──────────────
|
60 |
+
📑 Optimized DOCX
|
61 |
+
🖍️ Text Highlighting Changes
|
62 |
+
🌐 HTML Version"""
|
63 |
+
graph.node('EnhancedOutputs', enhanced, **styles['output'])
|
64 |
+
|
65 |
+
# Add edges
|
66 |
+
graph.edge('Start', 'Upload')
|
67 |
+
graph.edge('Upload', 'JobDesc')
|
68 |
+
graph.edge('JobDesc', 'ApiKey')
|
69 |
+
graph.edge('ApiKey', 'ChooseAction')
|
70 |
+
graph.edge('ChooseAction', 'AnalysisType', 'Analyze')
|
71 |
+
graph.edge('ChooseAction', 'Enhancement', 'Enhance')
|
72 |
+
graph.edge('AnalysisType', 'QuickAnalysis', 'Quick')
|
73 |
+
graph.edge('AnalysisType', 'InDepthAnalysis', 'In-Depth')
|
74 |
+
graph.edge('QuickAnalysis', 'Results')
|
75 |
+
graph.edge('InDepthAnalysis', 'Results')
|
76 |
+
graph.edge('Enhancement', 'EnhancedOutputs')
|
77 |
+
graph.edge('Results', 'ChooseAction', 'New Analysis')
|
78 |
+
graph.edge('EnhancedOutputs', 'ChooseAction', 'New Analysis')
|
79 |
+
|
80 |
+
# Graph settings
|
81 |
+
graph.attr(rankdir='TB', splines='ortho')
|
82 |
+
|
83 |
+
return graph
|
latex.txt
ADDED
@@ -0,0 +1,129 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
|
2 |
+
\documentclass[a4paper, 8pt]{article}
|
3 |
+
\usepackage[margin=0.25in]{geometry}
|
4 |
+
\usepackage{array}
|
5 |
+
\usepackage{enumitem}
|
6 |
+
\usepackage{hyperref}
|
7 |
+
\usepackage{xcolor}
|
8 |
+
\usepackage[scaled=0.9]{helvet}
|
9 |
+
\usepackage{sfmath}
|
10 |
+
\renewcommand{\familydefault}{\sfdefault}
|
11 |
+
\renewcommand{\seriesdefault}{\mddefault}
|
12 |
+
\renewcommand{\shapedefault}{\updefault}
|
13 |
+
|
14 |
+
\setlength{\parindent}{0pt}
|
15 |
+
\setlength{\parskip}{1pt}
|
16 |
+
|
17 |
+
\newcommand{\resumeSection}[1]{\vspace{1pt}\textbf{\normalsize #1}\vspace{1pt}\hrule\vspace{1pt}}
|
18 |
+
\newcommand{\resumeSubsection}[2]{\vspace{0.0pt}\textbf{\scriptsize #1} \hfill \scriptsize #2 \vspace{0.0pt}\hrule}
|
19 |
+
|
20 |
+
\begin{document}
|
21 |
+
|
22 |
+
\begin{center}
|
23 |
+
\textbf{\Large SAI NIHAR REDDY PALEM} \\
|
24 |
+
\small\href{mailto:[email protected]}{[email protected]} $\mid$ (562)-822-7482 $\mid$ San Jose, CA $\mid$
|
25 |
+
\href{https://github.com/niharpalem}{GitHub} $\mid$ \href{https://www.linkedin.com/in/nihar-palem-1b955a183/}{LinkedIn} $\mid$ \href{https://nihar-palem.medium.com/}{Medium} $\mid$ \href{https://huggingface.co/spaces/Niharmahesh/Portfolio}{Portfolio}
|
26 |
+
\end{center}
|
27 |
+
|
28 |
+
\resumeSection{Summary}
|
29 |
+
{\small As an AI engineer with a strong foundation in machine learning and data engineering, I specialize in developing robust data pipelines, implementing efficient annotation systems, and creating comprehensive evaluation metrics to enhance AI model performance. My experience spans from large-scale data processing to cutting-edge model development using technologies like PyTorch and TensorFlow. I thrive in collaborative, fast-paced environments where I can apply my problem-solving skills to drive innovative AI solutions. With a passion for advancing AI in creative domains, I am dedicated to pushing the boundaries of what's possible in AI-assisted design and making these tools accessible to all users.}
|
30 |
+
|
31 |
+
\resumeSection{Education}
|
32 |
+
{\small
|
33 |
+
\textbf{San Jose State University, California, USA} \hfill \textit{Jan 2023 -- Dec 2024}\\
|
34 |
+
\textit{Master of Science, Data Analytics}\\
|
35 |
+
\small{Relevant Coursework: Machine Learning, Deep Learning, Big Data Analytics, Mathematics for Data analysis}
|
36 |
+
|
37 |
+
\textbf{Sreenidhi Institute of Science and Technology, Hyderabad, India} \hfill \textit{June 2015 -- June 2019}\\
|
38 |
+
\textit{Bachelor of Technology, Electrical and Electronics Engineering (EEE)}
|
39 |
+
}
|
40 |
+
|
41 |
+
\resumeSection{Technical Skills}
|
42 |
+
{\small
|
43 |
+
\begin{itemize}[itemsep=0pt, leftmargin=*]
|
44 |
+
\item \textbf{AI/ML Infrastructure:} PyTorch, TensorFlow, Scikit-Learn, Apache Spark, Docker, Model Development Pipelines
|
45 |
+
\item \textbf{Machine Learning:} Deep Learning, Computer Vision, NLP, Reinforcement Learning, Vision Transformers, LLMs (GPT, LLaMA)
|
46 |
+
\item \textbf{Data Engineering:} ETL/ELT Pipelines, Apache Airflow, Data Annotation Systems, CRISP-DM methodology
|
47 |
+
\item \textbf{Cloud \& Big Data:} AWS (Certified), GCP (BigQuery, Cloud Composer, Cloud Storage), Snowflake, AWS Redshift
|
48 |
+
\item \textbf{Data Visualization:} Tableau, PowerBI, Seaborn, Matplotlib, Streamlit
|
49 |
+
\item \textbf{Databases:} MySQL, MongoDB, Snowflake, BigQuery
|
50 |
+
\item \textbf{Programming Languages:} Python (Advanced), SQL (Advanced)
|
51 |
+
\end{itemize}
|
52 |
+
}
|
53 |
+
|
54 |
+
|
55 |
+
\resumeSection{Professional Experience}
|
56 |
+
{\small
|
57 |
+
\textbf{San Jose State University, San Jose} \hfill \textit{Aug 2024 -- Dec 2024}\\
|
58 |
+
\textit{Instructional Student Assistant}
|
59 |
+
\begin{itemize}[itemsep=0pt, leftmargin=*]
|
60 |
+
\item Improved student project implementation efficiency by 30\% through comprehensive feedback and technical guidance.
|
61 |
+
\item Reviewed and debugged student data pipelines, offering solutions for complex technical challenges in data analysis and ML model optimization.
|
62 |
+
\end{itemize}
|
63 |
+
|
64 |
+
\textbf{Bharat Electronics Limited, Hyderabad} \hfill \textit{Feb 2021 -- Mar 2022}\\
|
65 |
+
\textit{Data Analyst}
|
66 |
+
\begin{itemize}[itemsep=0pt, leftmargin=*]
|
67 |
+
\item Optimized SQL queries for multi-million row defense databases, improving analysis efficiency by 40\% across Navy, Air Force, and international defense project analytics.
|
68 |
+
\item Developed and maintained 100+ Power BI dashboards tracking defense electronics sales, production costs, and project metrics, enabling data-driven decisions for senior management.
|
69 |
+
\item Implemented data standardization protocols using SQL triggers, ensuring consistency in currency conversions and text formatting.
|
70 |
+
\end{itemize}
|
71 |
+
}
|
72 |
+
|
73 |
+
\resumeSection{Projects and Achievements}
|
74 |
+
{\small
|
75 |
+
\textbf{Multi-Agent Job Search System} \href{https://huggingface.co/spaces/Niharmahesh/Multi_Agent_Job_search_and_match}{\textcolor{gray}{Application}}
|
76 |
+
\hfill \textit{Jan 2025}
|
77 |
+
\begin{itemize}[itemsep=0pt, leftmargin=*]
|
78 |
+
\item Engineered dual-agent platform using LLaMA models (8B for parameter extraction and resume summarization, 70B for matching), implementing real-time web scraping across LinkedIn, Glassdoor, and Indeed with automated batch processing of 60+ jobs per search.
|
79 |
+
\item Developed intelligent matching system with resume summarization and job compatibility scoring, achieving 70\% reduction in search time through optimized prompt engineering and structured data processing.
|
80 |
+
\end{itemize}
|
81 |
+
|
82 |
+
\textbf{Job Easz Data Collector}
|
83 |
+
\href{https://huggingface.co/spaces/Niharmahesh/job_easz}{\textcolor{gray}{Application}}
|
84 |
+
\hfill \textit{Dec 2024}
|
85 |
+
\begin{itemize}[itemsep=0pt, leftmargin=*]
|
86 |
+
\item Developed an automated data collection pipeline using Apache Airflow, scraping 10000+ job postings daily from LinkedIn, Glassdoor, Indeed, and Google Jobs
|
87 |
+
with a combination of roles and locations (1500+), storing data in Hugging Face datasets for open-source accessibility.
|
88 |
+
\item Created an interactive dashboard featuring time series analysis of job postings and role-based trends, with search and filter functionalities.
|
89 |
+
\item Enabled users to access and analyze job market data through an open-source application, promoting data-driven decision making in career planning.
|
90 |
+
\end{itemize}
|
91 |
+
|
92 |
+
\textbf{Sign Language Assistant}
|
93 |
+
\href{https://huggingface.co/spaces/Niharmahesh/slr-easz}{\textcolor{gray}{Application}}
|
94 |
+
\hfill \textit{Aug 2024 - Oct 2024}
|
95 |
+
\begin{itemize}[itemsep=0pt, leftmargin=*]
|
96 |
+
\item Built real-time ASL translation system using Google's Mediapipe Model and Random Forest classifier with optimized gesture recognition.
|
97 |
+
\item Achieved 95\% accuracy for 19 alphabets while implementing interactive learning features with immediate feedback mechanisms.
|
98 |
+
\end{itemize}
|
99 |
+
|
100 |
+
\textbf{National Infrastructure Monitoring}
|
101 |
+
\href{https://huggingface.co/spaces/Niharmahesh/Data298}{\textcolor{gray}{Application}}
|
102 |
+
\hfill \textit{Jan 2024 - Dec 2024}
|
103 |
+
\begin{itemize}[itemsep=0pt, leftmargin=*]
|
104 |
+
\item Developed Vision Transformer-based satellite imagery analysis system processing 40GB dataset with custom CV models for infrastructure monitoring.
|
105 |
+
\item Achieved 85\% accuracy in change detection and deployed interactive temporal analysis interface on Hugging Face Spaces.
|
106 |
+
\end{itemize}
|
107 |
+
|
108 |
+
\textbf{Stock Market Chatbot}
|
109 |
+
\hfill \textit{Aug 2023 - Dec 2023}
|
110 |
+
\begin{itemize}[itemsep=0pt, leftmargin=*]
|
111 |
+
\item Created bilingual financial analysis chatbot integrating GPT-3.5 with yfinance API for real-time market insights and personalized recommendations.
|
112 |
+
\item Engineered high-performance backend using Apache Spark and Snowflake, achieving 30\% query optimization with sub-5 second response time.
|
113 |
+
\end{itemize}
|
114 |
+
|
115 |
+
\textbf{Twitter Trend Analysis with BigQuery} \hfill \textit{Jan 2023 - Apr 2023}
|
116 |
+
\begin{itemize}[itemsep=0pt, leftmargin=*]
|
117 |
+
\item Engineered GCP-based data pipeline using Cloud Composer and BigQuery, implementing OLAP analytics with Star Schema for processing 10k daily tweets.
|
118 |
+
\item Developed automated ETL workflows with custom Python operators for data transformation, achieving 40\% improvement in processing efficiency.
|
119 |
+
\end{itemize}
|
120 |
+
}
|
121 |
+
|
122 |
+
\resumeSection{Certifications}
|
123 |
+
{\small
|
124 |
+
\begin{itemize}[itemsep=0pt, leftmargin=*]
|
125 |
+
\item \textbf{AWS Certified Cloud Practitioner:} Validated knowledge of AWS services, security, and architectural principles (Jan 2024).
|
126 |
+
\end{itemize}
|
127 |
+
}
|
128 |
+
|
129 |
+
\end{document}
|
main.py
ADDED
@@ -0,0 +1,178 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import streamlit as st
|
2 |
+
import groq
|
3 |
+
from prompts import analyze_job_fit, optimize_latex_resume,evaluate_resume,generate_cover_letter
|
4 |
+
import json
|
5 |
+
|
6 |
+
|
7 |
+
def main():
|
8 |
+
st.set_page_config(
|
9 |
+
layout="wide",
|
10 |
+
page_title="Resume Enhancer",
|
11 |
+
initial_sidebar_state="collapsed"
|
12 |
+
)
|
13 |
+
file_path = 'latex.txt'
|
14 |
+
with open(file_path, 'r') as f:
|
15 |
+
latex_template = f.read()
|
16 |
+
|
17 |
+
MODELS = [
|
18 |
+
"deepseek-r1-distill-llama-70b",
|
19 |
+
"gemma2-9b-it",
|
20 |
+
"llama-3.2-1b-preview",
|
21 |
+
"llama-3.2-3b-preview",
|
22 |
+
"llama-3.3-70b-versatile",
|
23 |
+
"llama-guard-3-8b",
|
24 |
+
"llama3-70b-8192",
|
25 |
+
"mixtral-8x7b-32768"]
|
26 |
+
|
27 |
+
# Custom CSS with reduced text sizes
|
28 |
+
st.markdown("""
|
29 |
+
<style>
|
30 |
+
.block-container {
|
31 |
+
padding-top: 1.5rem;
|
32 |
+
padding-bottom: 1.5rem;
|
33 |
+
max-width: 1200px;
|
34 |
+
}
|
35 |
+
.stButton>button {
|
36 |
+
background-color: #2563eb;
|
37 |
+
color: white;
|
38 |
+
border-radius: 0.375rem;
|
39 |
+
padding: 0.75rem 1.5rem;
|
40 |
+
border: none;
|
41 |
+
box-shadow: 0 1px 2px rgba(0, 0, 0, 0.05);
|
42 |
+
margin: 0.5rem;
|
43 |
+
min-width: 200px;
|
44 |
+
font-size: 0.875rem;
|
45 |
+
}
|
46 |
+
[data-testid="stFileUploader"] {
|
47 |
+
border: 2px dashed #e5e7eb;
|
48 |
+
border-radius: 0.5rem;
|
49 |
+
padding: 0.875rem;
|
50 |
+
min-height: 220px;
|
51 |
+
font-size: 0.875rem;
|
52 |
+
}
|
53 |
+
.stTextArea>div>div {
|
54 |
+
border-radius: 0.5rem;
|
55 |
+
min-height: 220px !important;
|
56 |
+
font-size: 0.875rem;
|
57 |
+
}
|
58 |
+
.stTextInput>div>div>input {
|
59 |
+
border-radius: 0.5rem;
|
60 |
+
font-size: 0.875rem;
|
61 |
+
}
|
62 |
+
.resume-html {
|
63 |
+
padding: 1.5rem;
|
64 |
+
max-width: 800px;
|
65 |
+
margin: 0 auto;
|
66 |
+
background: white;
|
67 |
+
box-shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
|
68 |
+
border-radius: 0.5rem;
|
69 |
+
font-size: 0.875rem;
|
70 |
+
}
|
71 |
+
h1 {font-size: 3rem !important; /* Adjust this value to increase the font size */
|
72 |
+
} h2 {font-size: 1.5rem !important; /* Adjust this value to increase the font size */
|
73 |
+
h3, h4, h5, h6 {
|
74 |
+
font-size: 80% !important;
|
75 |
+
}
|
76 |
+
p, li {
|
77 |
+
font-size: 0.875rem !important;
|
78 |
+
}
|
79 |
+
</style>
|
80 |
+
""", unsafe_allow_html=True)
|
81 |
+
|
82 |
+
# Header with smaller text
|
83 |
+
st.markdown("""
|
84 |
+
<h1 style='text-align: center; font-size: 2.5rem; font-weight: 800; margin-bottom: 0.875rem;'>
|
85 |
+
Resume Easz
|
86 |
+
</h1>
|
87 |
+
""", unsafe_allow_html=True)
|
88 |
+
st.markdown("""
|
89 |
+
<h2 style='text-align: center; font-size: 1.5rem; font-weight: 400; margin-bottom: 0.875rem;'>
|
90 |
+
Analyze and Enhance Your Resume with AI
|
91 |
+
</h2>
|
92 |
+
""", unsafe_allow_html=True)
|
93 |
+
st.markdown("---")
|
94 |
+
# Initialize variables
|
95 |
+
resume_text = None
|
96 |
+
job_description = None
|
97 |
+
original_file = None
|
98 |
+
|
99 |
+
# Side-by-side inputs with equal width
|
100 |
+
col1, col2 = st.columns(2)
|
101 |
+
|
102 |
+
with col1:
|
103 |
+
st.markdown("##### Upload your biodata as JSON file")
|
104 |
+
resume = st.file_uploader(
|
105 |
+
"Drop your resume file here",
|
106 |
+
type=['json']
|
107 |
+
)
|
108 |
+
if resume is not None:
|
109 |
+
resume_text = json.load(resume)
|
110 |
+
|
111 |
+
with col2:
|
112 |
+
st.markdown("##### Job Description")
|
113 |
+
job_description = st.text_area(
|
114 |
+
"Paste job description",
|
115 |
+
placeholder="Paste the job description here...",
|
116 |
+
height=220,
|
117 |
+
label_visibility="collapsed"
|
118 |
+
)
|
119 |
+
|
120 |
+
# Centered API key input first
|
121 |
+
key = st.text_input(
|
122 |
+
"GROQ API Key",
|
123 |
+
type="password",
|
124 |
+
placeholder="Enter your GROQ API key...",
|
125 |
+
help="Your API key will not be stored"
|
126 |
+
)
|
127 |
+
|
128 |
+
selected_model =st.selectbox("Select Model", MODELS)
|
129 |
+
st.markdown("The selected model will be used for the major tasks such as latex code generation for resume and cover letter")
|
130 |
+
if key:
|
131 |
+
client = groq.Client(api_key=key)
|
132 |
+
|
133 |
+
|
134 |
+
# Centered action buttons
|
135 |
+
col_buttons, _, _ = st.columns([200,1,1])
|
136 |
+
with col_buttons:
|
137 |
+
col_b1, col_b2, col_b3 = st.columns(3)
|
138 |
+
|
139 |
+
process_quick = col_b1.button("Quick Analysis")
|
140 |
+
generate_reusme = col_b2.button("Generate Latex code for Resume")
|
141 |
+
geenrate_coverletter = col_b3.button("Generate latex code Cover Letter")
|
142 |
+
|
143 |
+
#to check if any of the buttons are clicked
|
144 |
+
if any([process_quick, generate_reusme, generate_cover_letter]):
|
145 |
+
if not resume_text:
|
146 |
+
st.error("Please upload your resume data in JSON format.")
|
147 |
+
elif not job_description:
|
148 |
+
st.error("Please paste provide the job description.")
|
149 |
+
else:
|
150 |
+
try:
|
151 |
+
with st.spinner("Processing your resume..."):
|
152 |
+
if process_quick:
|
153 |
+
analysis = analyze_job_fit(client, resume_text, job_description)
|
154 |
+
st.markdown("### Quick Analysis Results")
|
155 |
+
st.markdown(analysis)
|
156 |
+
elif generate_reusme:
|
157 |
+
with st.spinner(f"Optimizing LaTeX Resume using {selected_model}..."):
|
158 |
+
analysis = analyze_job_fit(client, resume_text, job_description)
|
159 |
+
optimized_latex = optimize_latex_resume(
|
160 |
+
client, analysis, latex_template, resume_text, selected_model)
|
161 |
+
st.subheader("Optimized LaTeX Resume")
|
162 |
+
st.code(optimized_latex, language="latex")
|
163 |
+
resume_evalaution = evaluate_resume(client, latex_template, optimized_latex, job_description, selected_model)
|
164 |
+
st.subheader("Resume Evaluation")
|
165 |
+
st.markdown(resume_evalaution)
|
166 |
+
else: # generate cover letter
|
167 |
+
cv_latex = generate_cover_letter(client, resume_text, job_description, selected_model)
|
168 |
+
st.subheader("latex code for Cover Letter")
|
169 |
+
st.code(cv_latex, language="latex")
|
170 |
+
except groq.RateLimitError as e:
|
171 |
+
st.error("API rate limit exceeded. Please try again later or use a different API key.")
|
172 |
+
except Exception as e:
|
173 |
+
st.error(f"An error occurred: {e}")
|
174 |
+
else:
|
175 |
+
st.info("👆 Please enter your GROQ API key to get started.")
|
176 |
+
|
177 |
+
if __name__ == "__main__":
|
178 |
+
main()
|
prompts.py
ADDED
@@ -0,0 +1,206 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import json
|
2 |
+
|
3 |
+
def analyze_job_fit(_client, json_data, job_description):
|
4 |
+
prompt = f"""
|
5 |
+
As an expert resume analyst, create a comprehensive optimization strategy for the given JSON resume data to match the job description. Your task:
|
6 |
+
|
7 |
+
1. Parse the JSON resume thoroughly, extracting all key sections: work experience, skills, education, and projects.
|
8 |
+
2. Map resume sections directly to job description requirements.
|
9 |
+
3. Identify exact skill and experience matches.
|
10 |
+
4. Determine the most relevant professional experiences.
|
11 |
+
5. Create a targeted alignment strategy.
|
12 |
+
6. Suggest relevant keywords from the job description to be added where appropriate.
|
13 |
+
7. Propose a new arrangement of sections (especially projects and skills) to better match the job description.
|
14 |
+
|
15 |
+
Important: Do not remove any information from the original JSON data. Instead, focus on rearranging and enhancing the content.
|
16 |
+
|
17 |
+
Resume Data:
|
18 |
+
{json.dumps(json_data)}
|
19 |
+
|
20 |
+
Job Description:
|
21 |
+
{job_description}
|
22 |
+
|
23 |
+
Provide a structured, data-driven optimization strategy, including:
|
24 |
+
1. Skill overlap percentage
|
25 |
+
2. Recommended content modifications
|
26 |
+
3. Suggested keyword additions
|
27 |
+
4. Proposed section rearrangement
|
28 |
+
5. A 3-line summary (tailored to the role):
|
29 |
+
→ This is the candidate's elevator pitch. Highlight their biggest achievements and skills that prove they're the right fit for the role. Make it sound natural, as if the candidate wrote it themselves. Be formal but conversational. Don't mention specific company names.
|
30 |
+
|
31 |
+
For the summary, focus on making it sound authentic, human, and tailored to the specific role. Keep it concise and impactful.
|
32 |
+
|
33 |
+
Additionally, suggest how to consolidate skills sections if there are multiple, aiming for about 5 main categories under skills. Don't combine all sections, but identify opportunities to group related skills effectively.
|
34 |
+
|
35 |
+
Emphasize the importance of keywords throughout the optimization strategy.
|
36 |
+
"""
|
37 |
+
|
38 |
+
response = _client.chat.completions.create(
|
39 |
+
model="llama3-8b-8192",
|
40 |
+
messages=[{"role": "user", "content": prompt}],
|
41 |
+
max_tokens=4000,
|
42 |
+
temperature=0.2
|
43 |
+
)
|
44 |
+
|
45 |
+
return response.choices[0].message.content
|
46 |
+
|
47 |
+
def optimize_latex_resume(_client, job_analysis, original_latex, json_data, selected_model):
|
48 |
+
prompt = f"""
|
49 |
+
As an expert resume writer with 15 years of experience, optimize the given LaTeX resume based on the job analysis and JSON data. Your task:
|
50 |
+
|
51 |
+
1. Generate a complete, ready-to-use LaTeX code for the optimized resume.
|
52 |
+
2. Use the existing LaTeX template as a base, but modify it according to the optimization strategy.
|
53 |
+
3. Incorporate all data from the JSON, rearranging sections as suggested in the job analysis.
|
54 |
+
4. Add relevant keywords from the job description where appropriate, ensuring high keyword density.
|
55 |
+
5. Include the 3-line summary at the top of the resume, formatted appropriately in LaTeX.
|
56 |
+
6. Ensure no information from the original JSON is omitted.
|
57 |
+
7. Consolidate skills sections as suggested in the job analysis, aiming for about 5 main categories.
|
58 |
+
|
59 |
+
Follow these specific rules:
|
60 |
+
- Preserve the overall style and formatting of the original template.
|
61 |
+
- Reorder sections based on job relevance.
|
62 |
+
- Use exact phrases from the JSON data where possible.
|
63 |
+
- Maximize keyword matching with the job description.
|
64 |
+
- Optimize section weights according to the job analysis.
|
65 |
+
- Ensure the skills section is well-organized and keyword-rich.
|
66 |
+
- Rearrange the projects and skills sections to better match the job description and, if needed, remove at most 1 irrelevant project.
|
67 |
+
|
68 |
+
Job Fit Analysis: {job_analysis}
|
69 |
+
Original LaTeX Template: {original_latex}
|
70 |
+
JSON Resume Data: {json.dumps(json_data)}
|
71 |
+
|
72 |
+
Provide only the complete, optimized LaTeX code, ready for compilation, with no comments or explanations.
|
73 |
+
"""
|
74 |
+
|
75 |
+
response = _client.chat.completions.create(
|
76 |
+
model=selected_model,
|
77 |
+
messages=[{"role": "user", "content": prompt}],
|
78 |
+
max_tokens=8000,
|
79 |
+
temperature=0.1
|
80 |
+
)
|
81 |
+
|
82 |
+
return response.choices[0].message.content
|
83 |
+
|
84 |
+
def evaluate_resume(_client, original_latex, optimized_latex, job_description, selected_model):
|
85 |
+
prompt = f"""
|
86 |
+
Provide a concise, data-driven comparison of the original and optimized resumes.
|
87 |
+
Focus on key improvements and use a structured format. Compare:
|
88 |
+
1. Keyword match percentage
|
89 |
+
2. Section relevance
|
90 |
+
3. Achievement descriptions
|
91 |
+
4. Professional positioning
|
92 |
+
5. Job description alignment
|
93 |
+
|
94 |
+
Provide a short, quantitative assessment with clear, measurable improvements.
|
95 |
+
Highlight top 3 key enhancements and any potential areas for further refinement.
|
96 |
+
Keep everything crisp and to the point.
|
97 |
+
|
98 |
+
Original Resume: {original_latex}
|
99 |
+
Optimized Resume: {optimized_latex}
|
100 |
+
Job Description: {job_description}
|
101 |
+
"""
|
102 |
+
|
103 |
+
response = _client.chat.completions.create(
|
104 |
+
model="llama3-8b-8192",
|
105 |
+
messages=[{"role": "user", "content": prompt}],
|
106 |
+
max_tokens=3000,
|
107 |
+
temperature=0.1
|
108 |
+
)
|
109 |
+
|
110 |
+
return response.choices[0].message.content
|
111 |
+
def generate_cover_letter(_client, json_data, job_description, selected_model):
|
112 |
+
prompt = f"""
|
113 |
+
As an expert LaTeX cover letter writer, follow these structured steps to create a professional cover letter:
|
114 |
+
|
115 |
+
Step 1: Extract key information from job description:
|
116 |
+
- Company name
|
117 |
+
- Role title
|
118 |
+
- Key responsibilities (top 3)
|
119 |
+
- Required qualifications
|
120 |
+
- Department/team name if mentioned
|
121 |
+
|
122 |
+
Job Description for analysis:
|
123 |
+
{job_description}
|
124 |
+
|
125 |
+
Step 2: Analyze JSON resume to identify:
|
126 |
+
- Most relevant experiences matching job requirements
|
127 |
+
- Quantifiable achievements that align with role
|
128 |
+
- Technical skills that match job needs
|
129 |
+
- Educational background relevance
|
130 |
+
|
131 |
+
Resume Data:
|
132 |
+
{json.dumps(json_data)}
|
133 |
+
|
134 |
+
Step 3: Generate a professional cover letter using this exact LaTeX template:
|
135 |
+
|
136 |
+
\\documentclass[10pt,a4paper]{{letter}}
|
137 |
+
\\usepackage[utf8]{{inputenc}}
|
138 |
+
\\usepackage[T1]{{fontenc}}
|
139 |
+
\\usepackage{{geometry}}
|
140 |
+
\\usepackage{{parskip}}
|
141 |
+
\\usepackage{{microtype}}
|
142 |
+
\\usepackage[hidelinks]{{hyperref}}
|
143 |
+
|
144 |
+
% Set margins
|
145 |
+
\\geometry{{
|
146 |
+
top=0.8in,
|
147 |
+
bottom=0.8in,
|
148 |
+
left=0.8in,
|
149 |
+
right=0.8in
|
150 |
+
}}
|
151 |
+
|
152 |
+
\\begin{{document}}
|
153 |
+
\\begin{{letter}}{{Hiring Manager, [Insert extracted company name here]}}
|
154 |
+
\\opening{{Dear Hiring Manager,}}
|
155 |
+
|
156 |
+
[Generate 4 paragraphs following this structure:]
|
157 |
+
Paragraph 1 (Opening - 3-4 sentences):
|
158 |
+
- Strong hook mentioning company name and role
|
159 |
+
- Brief statement of key qualification
|
160 |
+
- Expression of genuine interest
|
161 |
+
|
162 |
+
Paragraph 2 (Experience - 4-5 sentences):
|
163 |
+
- 2-3 most relevant achievements with metrics
|
164 |
+
- Direct connection to job requirements
|
165 |
+
- Demonstration of technical skills
|
166 |
+
|
167 |
+
Paragraph 3 (Company Knowledge - 3-4 sentences):
|
168 |
+
- Show research about company
|
169 |
+
- Connect your values to company mission
|
170 |
+
- Explain why this specific role interests you
|
171 |
+
|
172 |
+
Paragraph 4 (Closing - 2-3 sentences):
|
173 |
+
- Confident statement about contribution potential
|
174 |
+
- Clear call to action
|
175 |
+
- Professional thank you
|
176 |
+
|
177 |
+
\\closing{{Sincerely,}}
|
178 |
+
\\vspace{{-5em}}
|
179 |
+
[Candidate full name] \\\\
|
180 |
+
[Email] \\\\
|
181 |
+
[Phone] \\\\
|
182 |
+
[Location]
|
183 |
+
\\end{{letter}}
|
184 |
+
\\end{{document}}
|
185 |
+
|
186 |
+
Requirements:
|
187 |
+
1. Maximum 350 words for main content
|
188 |
+
2. Use active voice and professional tone
|
189 |
+
3. Include at least 3 quantifiable achievements
|
190 |
+
4. Reference minimum 2 specific company details
|
191 |
+
5. Maintain proper paragraph spacing
|
192 |
+
6. Ensure all contact details match resume exactly
|
193 |
+
7. Include relevant keywords from job description
|
194 |
+
8. Keep technical details balanced with soft skills
|
195 |
+
|
196 |
+
Return only the complete LaTeX code with no additional text or explanations.
|
197 |
+
"""
|
198 |
+
|
199 |
+
response = _client.chat.completions.create(
|
200 |
+
model=selected_model,
|
201 |
+
messages=[{"role": "user", "content": prompt}],
|
202 |
+
max_tokens=2000,
|
203 |
+
temperature=0.3
|
204 |
+
)
|
205 |
+
|
206 |
+
return response.choices[0].message.content
|
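The helpers above only require that the resume data be JSON-serializable: `analyze_job_fit` passes it through `json.dumps`, and the prompt text mentions work experience, skills, education and projects but does not pin down a schema. Below is a minimal, hypothetical usage sketch; the field names and the placeholder API key are illustrative and not taken from this repository.

```python
# Hypothetical standalone usage of analyze_job_fit outside the Streamlit app.
import groq

from prompts import analyze_job_fit

client = groq.Client(api_key="YOUR_GROQ_API_KEY")  # placeholder, not a real key

resume_json = {
    "experience": [
        {"title": "Data Analyst", "company": "Example Corp",
         "highlights": ["Built dashboards used by 30+ stakeholders"]}
    ],
    "skills": {"programming": ["Python", "SQL"], "tools": ["Streamlit"]},
    "education": [{"degree": "B.S. Computer Science"}],
    "projects": [{"name": "Resume optimizer", "tech": ["Groq", "LaTeX"]}],
}

job_description = "We are hiring a data analyst with strong Python and SQL skills."

print(analyze_job_fit(client, resume_json, job_description))
```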
requirements.txt
ADDED
@@ -0,0 +1,10 @@
|
1 |
+
|
2 |
+
streamlit
|
3 |
+
groq
|
4 |
+
python-docx
|
5 |
+
docx2txt
|
6 |
+
fpdf
|
7 |
+
graphviz
|
8 |
+
diff-match-patch
|
9 |
+
pdf2image
|
10 |
+
|
venv/.gitignore
ADDED
@@ -0,0 +1,2 @@
|
1 |
+
# Created by venv; see https://docs.python.org/3/library/venv.html
|
2 |
+
*
|
venv/bin/Activate.ps1
ADDED
@@ -0,0 +1,248 @@
|
1 |
+
<#
|
2 |
+
.Synopsis
|
3 |
+
Activate a Python virtual environment for the current PowerShell session.
|
4 |
+
|
5 |
+
.Description
|
6 |
+
Pushes the python executable for a virtual environment to the front of the
|
7 |
+
$Env:PATH environment variable and sets the prompt to signify that you are
|
8 |
+
in a Python virtual environment. Makes use of the command line switches as
|
9 |
+
well as the `pyvenv.cfg` file values present in the virtual environment.
|
10 |
+
|
11 |
+
.Parameter VenvDir
|
12 |
+
Path to the directory that contains the virtual environment to activate. The
|
13 |
+
default value for this is the parent of the directory that the Activate.ps1
|
14 |
+
script is located within.
|
15 |
+
|
16 |
+
.Parameter Prompt
|
17 |
+
The prompt prefix to display when this virtual environment is activated. By
|
18 |
+
default, this prompt is the name of the virtual environment folder (VenvDir)
|
19 |
+
surrounded by parentheses and followed by a single space (ie. '(.venv) ').
|
20 |
+
|
21 |
+
.Example
|
22 |
+
Activate.ps1
|
23 |
+
Activates the Python virtual environment that contains the Activate.ps1 script.
|
24 |
+
|
25 |
+
.Example
|
26 |
+
Activate.ps1 -Verbose
|
27 |
+
Activates the Python virtual environment that contains the Activate.ps1 script,
|
28 |
+
and shows extra information about the activation as it executes.
|
29 |
+
|
30 |
+
.Example
|
31 |
+
Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
|
32 |
+
Activates the Python virtual environment located in the specified location.
|
33 |
+
|
34 |
+
.Example
|
35 |
+
Activate.ps1 -Prompt "MyPython"
|
36 |
+
Activates the Python virtual environment that contains the Activate.ps1 script,
|
37 |
+
and prefixes the current prompt with the specified string (surrounded in
|
38 |
+
parentheses) while the virtual environment is active.
|
39 |
+
|
40 |
+
.Notes
|
41 |
+
On Windows, it may be required to enable this Activate.ps1 script by setting the
|
42 |
+
execution policy for the user. You can do this by issuing the following PowerShell
|
43 |
+
command:
|
44 |
+
|
45 |
+
PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
|
46 |
+
|
47 |
+
For more information on Execution Policies:
|
48 |
+
https://go.microsoft.com/fwlink/?LinkID=135170
|
49 |
+
|
50 |
+
#>
|
51 |
+
Param(
|
52 |
+
[Parameter(Mandatory = $false)]
|
53 |
+
[String]
|
54 |
+
$VenvDir,
|
55 |
+
[Parameter(Mandatory = $false)]
|
56 |
+
[String]
|
57 |
+
$Prompt
|
58 |
+
)
|
59 |
+
|
60 |
+
<# Function declarations --------------------------------------------------- #>
|
61 |
+
|
62 |
+
<#
|
63 |
+
.Synopsis
|
64 |
+
Remove all shell session elements added by the Activate script, including the
|
65 |
+
addition of the virtual environment's Python executable from the beginning of
|
66 |
+
the PATH variable.
|
67 |
+
|
68 |
+
.Parameter NonDestructive
|
69 |
+
If present, do not remove this function from the global namespace for the
|
70 |
+
session.
|
71 |
+
|
72 |
+
#>
|
73 |
+
function global:deactivate ([switch]$NonDestructive) {
|
74 |
+
# Revert to original values
|
75 |
+
|
76 |
+
# The prior prompt:
|
77 |
+
if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
|
78 |
+
Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
|
79 |
+
Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
|
80 |
+
}
|
81 |
+
|
82 |
+
# The prior PYTHONHOME:
|
83 |
+
if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
|
84 |
+
Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
|
85 |
+
Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
|
86 |
+
}
|
87 |
+
|
88 |
+
# The prior PATH:
|
89 |
+
if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
|
90 |
+
Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
|
91 |
+
Remove-Item -Path Env:_OLD_VIRTUAL_PATH
|
92 |
+
}
|
93 |
+
|
94 |
+
# Just remove the VIRTUAL_ENV altogether:
|
95 |
+
if (Test-Path -Path Env:VIRTUAL_ENV) {
|
96 |
+
Remove-Item -Path env:VIRTUAL_ENV
|
97 |
+
}
|
98 |
+
|
99 |
+
# Just remove VIRTUAL_ENV_PROMPT altogether.
|
100 |
+
if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) {
|
101 |
+
Remove-Item -Path env:VIRTUAL_ENV_PROMPT
|
102 |
+
}
|
103 |
+
|
104 |
+
# Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
|
105 |
+
if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
|
106 |
+
Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
|
107 |
+
}
|
108 |
+
|
109 |
+
# Leave deactivate function in the global namespace if requested:
|
110 |
+
if (-not $NonDestructive) {
|
111 |
+
Remove-Item -Path function:deactivate
|
112 |
+
}
|
113 |
+
}
|
114 |
+
|
115 |
+
<#
|
116 |
+
.Description
|
117 |
+
Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
|
118 |
+
given folder, and returns them in a map.
|
119 |
+
|
120 |
+
For each line in the pyvenv.cfg file, if that line can be parsed into exactly
|
121 |
+
two strings separated by `=` (with any amount of whitespace surrounding the =)
|
122 |
+
then it is considered a `key = value` line. The left hand string is the key,
|
123 |
+
the right hand is the value.
|
124 |
+
|
125 |
+
If the value starts with a `'` or a `"` then the first and last character is
|
126 |
+
stripped from the value before being captured.
|
127 |
+
|
128 |
+
.Parameter ConfigDir
|
129 |
+
Path to the directory that contains the `pyvenv.cfg` file.
|
130 |
+
#>
|
131 |
+
function Get-PyVenvConfig(
|
132 |
+
[String]
|
133 |
+
$ConfigDir
|
134 |
+
) {
|
135 |
+
Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
|
136 |
+
|
137 |
+
# Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
|
138 |
+
$pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
|
139 |
+
|
140 |
+
# An empty map will be returned if no config file is found.
|
141 |
+
$pyvenvConfig = @{ }
|
142 |
+
|
143 |
+
if ($pyvenvConfigPath) {
|
144 |
+
|
145 |
+
Write-Verbose "File exists, parse `key = value` lines"
|
146 |
+
$pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
|
147 |
+
|
148 |
+
$pyvenvConfigContent | ForEach-Object {
|
149 |
+
$keyval = $PSItem -split "\s*=\s*", 2
|
150 |
+
if ($keyval[0] -and $keyval[1]) {
|
151 |
+
$val = $keyval[1]
|
152 |
+
|
153 |
+
# Remove extraneous quotations around a string value.
|
154 |
+
if ("'""".Contains($val.Substring(0, 1))) {
|
155 |
+
$val = $val.Substring(1, $val.Length - 2)
|
156 |
+
}
|
157 |
+
|
158 |
+
$pyvenvConfig[$keyval[0]] = $val
|
159 |
+
Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
|
160 |
+
}
|
161 |
+
}
|
162 |
+
}
|
163 |
+
return $pyvenvConfig
|
164 |
+
}
|
165 |
+
|
166 |
+
|
167 |
+
<# Begin Activate script --------------------------------------------------- #>
|
168 |
+
|
169 |
+
# Determine the containing directory of this script
|
170 |
+
$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
171 |
+
$VenvExecDir = Get-Item -Path $VenvExecPath
|
172 |
+
|
173 |
+
Write-Verbose "Activation script is located in path: '$VenvExecPath'"
|
174 |
+
Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
|
175 |
+
Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
|
176 |
+
|
177 |
+
# Set values required in priority: CmdLine, ConfigFile, Default
|
178 |
+
# First, get the location of the virtual environment, it might not be
|
179 |
+
# VenvExecDir if specified on the command line.
|
180 |
+
if ($VenvDir) {
|
181 |
+
Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
|
182 |
+
}
|
183 |
+
else {
|
184 |
+
Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
|
185 |
+
$VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
|
186 |
+
Write-Verbose "VenvDir=$VenvDir"
|
187 |
+
}
|
188 |
+
|
189 |
+
# Next, read the `pyvenv.cfg` file to determine any required value such
|
190 |
+
# as `prompt`.
|
191 |
+
$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
|
192 |
+
|
193 |
+
# Next, set the prompt from the command line, or the config file, or
|
194 |
+
# just use the name of the virtual environment folder.
|
195 |
+
if ($Prompt) {
|
196 |
+
Write-Verbose "Prompt specified as argument, using '$Prompt'"
|
197 |
+
}
|
198 |
+
else {
|
199 |
+
Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
|
200 |
+
if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
|
201 |
+
Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
|
202 |
+
$Prompt = $pyvenvCfg['prompt'];
|
203 |
+
}
|
204 |
+
else {
|
205 |
+
Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
|
206 |
+
Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
|
207 |
+
$Prompt = Split-Path -Path $venvDir -Leaf
|
208 |
+
}
|
209 |
+
}
|
210 |
+
|
211 |
+
Write-Verbose "Prompt = '$Prompt'"
|
212 |
+
Write-Verbose "VenvDir='$VenvDir'"
|
213 |
+
|
214 |
+
# Deactivate any currently active virtual environment, but leave the
|
215 |
+
# deactivate function in place.
|
216 |
+
deactivate -nondestructive
|
217 |
+
|
218 |
+
# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
|
219 |
+
# that there is an activated venv.
|
220 |
+
$env:VIRTUAL_ENV = $VenvDir
|
221 |
+
|
222 |
+
$env:VIRTUAL_ENV_PROMPT = $Prompt
|
223 |
+
|
224 |
+
if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
|
225 |
+
|
226 |
+
Write-Verbose "Setting prompt to '$Prompt'"
|
227 |
+
|
228 |
+
# Set the prompt to include the env name
|
229 |
+
# Make sure _OLD_VIRTUAL_PROMPT is global
|
230 |
+
function global:_OLD_VIRTUAL_PROMPT { "" }
|
231 |
+
Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
|
232 |
+
New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
|
233 |
+
|
234 |
+
function global:prompt {
|
235 |
+
Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
|
236 |
+
_OLD_VIRTUAL_PROMPT
|
237 |
+
}
|
238 |
+
}
|
239 |
+
|
240 |
+
# Clear PYTHONHOME
|
241 |
+
if (Test-Path -Path Env:PYTHONHOME) {
|
242 |
+
Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
|
243 |
+
Remove-Item -Path Env:PYTHONHOME
|
244 |
+
}
|
245 |
+
|
246 |
+
# Add the venv to the PATH
|
247 |
+
Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
|
248 |
+
$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"
|
venv/bin/activate
ADDED
@@ -0,0 +1,76 @@
|
1 |
+
# This file must be used with "source bin/activate" *from bash*
|
2 |
+
# You cannot run it directly
|
3 |
+
|
4 |
+
deactivate () {
|
5 |
+
# reset old environment variables
|
6 |
+
if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
|
7 |
+
PATH="${_OLD_VIRTUAL_PATH:-}"
|
8 |
+
export PATH
|
9 |
+
unset _OLD_VIRTUAL_PATH
|
10 |
+
fi
|
11 |
+
if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
|
12 |
+
PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
|
13 |
+
export PYTHONHOME
|
14 |
+
unset _OLD_VIRTUAL_PYTHONHOME
|
15 |
+
fi
|
16 |
+
|
17 |
+
# Call hash to forget past locations. Without forgetting
|
18 |
+
# past locations the $PATH changes we made may not be respected.
|
19 |
+
# See "man bash" for more details. hash is usually a builtin of your shell
|
20 |
+
hash -r 2> /dev/null
|
21 |
+
|
22 |
+
if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
|
23 |
+
PS1="${_OLD_VIRTUAL_PS1:-}"
|
24 |
+
export PS1
|
25 |
+
unset _OLD_VIRTUAL_PS1
|
26 |
+
fi
|
27 |
+
|
28 |
+
unset VIRTUAL_ENV
|
29 |
+
unset VIRTUAL_ENV_PROMPT
|
30 |
+
if [ ! "${1:-}" = "nondestructive" ] ; then
|
31 |
+
# Self destruct!
|
32 |
+
unset -f deactivate
|
33 |
+
fi
|
34 |
+
}
|
35 |
+
|
36 |
+
# unset irrelevant variables
|
37 |
+
deactivate nondestructive
|
38 |
+
|
39 |
+
# on Windows, a path can contain colons and backslashes and has to be converted:
|
40 |
+
case "$(uname)" in
|
41 |
+
CYGWIN*|MSYS*|MINGW*)
|
42 |
+
# transform D:\path\to\venv to /d/path/to/venv on MSYS and MINGW
|
43 |
+
# and to /cygdrive/d/path/to/venv on Cygwin
|
44 |
+
VIRTUAL_ENV=$(cygpath /Users/niharpalem/Desktop/vscode/resume_EandA/venv)
|
45 |
+
export VIRTUAL_ENV
|
46 |
+
;;
|
47 |
+
*)
|
48 |
+
# use the path as-is
|
49 |
+
export VIRTUAL_ENV=/Users/niharpalem/Desktop/vscode/resume_EandA/venv
|
50 |
+
;;
|
51 |
+
esac
|
52 |
+
|
53 |
+
_OLD_VIRTUAL_PATH="$PATH"
|
54 |
+
PATH="$VIRTUAL_ENV/"bin":$PATH"
|
55 |
+
export PATH
|
56 |
+
|
57 |
+
VIRTUAL_ENV_PROMPT=venv
|
58 |
+
export VIRTUAL_ENV_PROMPT
|
59 |
+
|
60 |
+
# unset PYTHONHOME if set
|
61 |
+
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
|
62 |
+
# could use `if (set -u; : $PYTHONHOME) ;` in bash
|
63 |
+
if [ -n "${PYTHONHOME:-}" ] ; then
|
64 |
+
_OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
|
65 |
+
unset PYTHONHOME
|
66 |
+
fi
|
67 |
+
|
68 |
+
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
|
69 |
+
_OLD_VIRTUAL_PS1="${PS1:-}"
|
70 |
+
PS1="("venv") ${PS1:-}"
|
71 |
+
export PS1
|
72 |
+
fi
|
73 |
+
|
74 |
+
# Call hash to forget past commands. Without forgetting
|
75 |
+
# past commands the $PATH changes we made may not be respected
|
76 |
+
hash -r 2> /dev/null
|
venv/bin/activate.csh
ADDED
@@ -0,0 +1,27 @@
|
1 |
+
# This file must be used with "source bin/activate.csh" *from csh*.
|
2 |
+
# You cannot run it directly.
|
3 |
+
|
4 |
+
# Created by Davide Di Blasi <[email protected]>.
|
5 |
+
# Ported to Python 3.3 venv by Andrew Svetlov <[email protected]>
|
6 |
+
|
7 |
+
alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate'
|
8 |
+
|
9 |
+
# Unset irrelevant variables.
|
10 |
+
deactivate nondestructive
|
11 |
+
|
12 |
+
setenv VIRTUAL_ENV /Users/niharpalem/Desktop/vscode/resume_EandA/venv
|
13 |
+
|
14 |
+
set _OLD_VIRTUAL_PATH="$PATH"
|
15 |
+
setenv PATH "$VIRTUAL_ENV/"bin":$PATH"
|
16 |
+
setenv VIRTUAL_ENV_PROMPT venv
|
17 |
+
|
18 |
+
|
19 |
+
set _OLD_VIRTUAL_PROMPT="$prompt"
|
20 |
+
|
21 |
+
if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
|
22 |
+
set prompt = "("venv") $prompt:q"
|
23 |
+
endif
|
24 |
+
|
25 |
+
alias pydoc python -m pydoc
|
26 |
+
|
27 |
+
rehash
|
venv/bin/activate.fish
ADDED
@@ -0,0 +1,69 @@
|
1 |
+
# This file must be used with "source <venv>/bin/activate.fish" *from fish*
|
2 |
+
# (https://fishshell.com/). You cannot run it directly.
|
3 |
+
|
4 |
+
function deactivate -d "Exit virtual environment and return to normal shell environment"
|
5 |
+
# reset old environment variables
|
6 |
+
if test -n "$_OLD_VIRTUAL_PATH"
|
7 |
+
set -gx PATH $_OLD_VIRTUAL_PATH
|
8 |
+
set -e _OLD_VIRTUAL_PATH
|
9 |
+
end
|
10 |
+
if test -n "$_OLD_VIRTUAL_PYTHONHOME"
|
11 |
+
set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
|
12 |
+
set -e _OLD_VIRTUAL_PYTHONHOME
|
13 |
+
end
|
14 |
+
|
15 |
+
if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
|
16 |
+
set -e _OLD_FISH_PROMPT_OVERRIDE
|
17 |
+
# prevents error when using nested fish instances (Issue #93858)
|
18 |
+
if functions -q _old_fish_prompt
|
19 |
+
functions -e fish_prompt
|
20 |
+
functions -c _old_fish_prompt fish_prompt
|
21 |
+
functions -e _old_fish_prompt
|
22 |
+
end
|
23 |
+
end
|
24 |
+
|
25 |
+
set -e VIRTUAL_ENV
|
26 |
+
set -e VIRTUAL_ENV_PROMPT
|
27 |
+
if test "$argv[1]" != "nondestructive"
|
28 |
+
# Self-destruct!
|
29 |
+
functions -e deactivate
|
30 |
+
end
|
31 |
+
end
|
32 |
+
|
33 |
+
# Unset irrelevant variables.
|
34 |
+
deactivate nondestructive
|
35 |
+
|
36 |
+
set -gx VIRTUAL_ENV /Users/niharpalem/Desktop/vscode/resume_EandA/venv
|
37 |
+
|
38 |
+
set -gx _OLD_VIRTUAL_PATH $PATH
|
39 |
+
set -gx PATH "$VIRTUAL_ENV/"bin $PATH
|
40 |
+
set -gx VIRTUAL_ENV_PROMPT venv
|
41 |
+
|
42 |
+
# Unset PYTHONHOME if set.
|
43 |
+
if set -q PYTHONHOME
|
44 |
+
set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
|
45 |
+
set -e PYTHONHOME
|
46 |
+
end
|
47 |
+
|
48 |
+
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
|
49 |
+
# fish uses a function instead of an env var to generate the prompt.
|
50 |
+
|
51 |
+
# Save the current fish_prompt function as the function _old_fish_prompt.
|
52 |
+
functions -c fish_prompt _old_fish_prompt
|
53 |
+
|
54 |
+
# With the original prompt function renamed, we can override with our own.
|
55 |
+
function fish_prompt
|
56 |
+
# Save the return status of the last command.
|
57 |
+
set -l old_status $status
|
58 |
+
|
59 |
+
# Output the venv prompt; color taken from the blue of the Python logo.
|
60 |
+
printf "%s(%s)%s " (set_color 4B8BBE) venv (set_color normal)
|
61 |
+
|
62 |
+
# Restore the return status of the previous command.
|
63 |
+
echo "exit $old_status" | .
|
64 |
+
# Output the original/"old" prompt.
|
65 |
+
_old_fish_prompt
|
66 |
+
end
|
67 |
+
|
68 |
+
set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
|
69 |
+
end
|
venv/bin/distro
ADDED
@@ -0,0 +1,8 @@
|
1 |
+
#!/Users/niharpalem/Desktop/vscode/resume_EandA/venv/bin/python3.13
|
2 |
+
# -*- coding: utf-8 -*-
|
3 |
+
import re
|
4 |
+
import sys
|
5 |
+
from distro.distro import main
|
6 |
+
if __name__ == '__main__':
|
7 |
+
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
8 |
+
sys.exit(main())
|
venv/bin/docx2txt
ADDED
@@ -0,0 +1,10 @@
|
1 |
+
#!/Users/niharpalem/Desktop/vscode/resume_EandA/venv/bin/python3.13
|
2 |
+
|
3 |
+
import docx2txt
|
4 |
+
|
5 |
+
if __name__ == '__main__':
|
6 |
+
import sys
|
7 |
+
args = docx2txt.process_args()
|
8 |
+
text = docx2txt.process(args.docx, args.img_dir)
|
9 |
+
output = getattr(sys.stdout, 'buffer', sys.stdout)
|
10 |
+
output.write(text.encode('utf-8'))
|
venv/bin/f2py
ADDED
@@ -0,0 +1,8 @@
|
1 |
+
#!/Users/niharpalem/Desktop/vscode/resume_EandA/venv/bin/python3.13
|
2 |
+
# -*- coding: utf-8 -*-
|
3 |
+
import re
|
4 |
+
import sys
|
5 |
+
from numpy.f2py.f2py2e import main
|
6 |
+
if __name__ == '__main__':
|
7 |
+
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
8 |
+
sys.exit(main())
|
venv/bin/httpx
ADDED
@@ -0,0 +1,8 @@
|
1 |
+
#!/Users/niharpalem/Desktop/vscode/resume_EandA/venv/bin/python3.13
|
2 |
+
# -*- coding: utf-8 -*-
|
3 |
+
import re
|
4 |
+
import sys
|
5 |
+
from httpx import main
|
6 |
+
if __name__ == '__main__':
|
7 |
+
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
8 |
+
sys.exit(main())
|
venv/bin/jsonschema
ADDED
@@ -0,0 +1,8 @@
|
1 |
+
#!/Users/niharpalem/Desktop/vscode/resume_EandA/venv/bin/python3.13
|
2 |
+
# -*- coding: utf-8 -*-
|
3 |
+
import re
|
4 |
+
import sys
|
5 |
+
from jsonschema.cli import main
|
6 |
+
if __name__ == '__main__':
|
7 |
+
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
8 |
+
sys.exit(main())
|
venv/bin/markdown-it
ADDED
@@ -0,0 +1,8 @@
|
1 |
+
#!/Users/niharpalem/Desktop/vscode/resume_EandA/venv/bin/python3.13
|
2 |
+
# -*- coding: utf-8 -*-
|
3 |
+
import re
|
4 |
+
import sys
|
5 |
+
from markdown_it.cli.parse import main
|
6 |
+
if __name__ == '__main__':
|
7 |
+
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
8 |
+
sys.exit(main())
|
venv/bin/normalizer
ADDED
@@ -0,0 +1,8 @@
|
1 |
+
#!/Users/niharpalem/Desktop/vscode/resume_EandA/venv/bin/python3.13
|
2 |
+
# -*- coding: utf-8 -*-
|
3 |
+
import re
|
4 |
+
import sys
|
5 |
+
from charset_normalizer import cli
|
6 |
+
if __name__ == '__main__':
|
7 |
+
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
8 |
+
sys.exit(cli.cli_detect())
|
venv/bin/numpy-config
ADDED
@@ -0,0 +1,8 @@
|
1 |
+
#!/Users/niharpalem/Desktop/vscode/resume_EandA/venv/bin/python3.13
|
2 |
+
# -*- coding: utf-8 -*-
|
3 |
+
import re
|
4 |
+
import sys
|
5 |
+
from numpy._configtool import main
|
6 |
+
if __name__ == '__main__':
|
7 |
+
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
8 |
+
sys.exit(main())
|
venv/bin/pip
ADDED
@@ -0,0 +1,8 @@
|
1 |
+
#!/Users/niharpalem/Desktop/vscode/resume_EandA/venv/bin/python3.13
|
2 |
+
# -*- coding: utf-8 -*-
|
3 |
+
import re
|
4 |
+
import sys
|
5 |
+
from pip._internal.cli.main import main
|
6 |
+
if __name__ == '__main__':
|
7 |
+
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
8 |
+
sys.exit(main())
|
venv/bin/pip3
ADDED
@@ -0,0 +1,8 @@
|
1 |
+
#!/Users/niharpalem/Desktop/vscode/resume_EandA/venv/bin/python3.13
|
2 |
+
# -*- coding: utf-8 -*-
|
3 |
+
import re
|
4 |
+
import sys
|
5 |
+
from pip._internal.cli.main import main
|
6 |
+
if __name__ == '__main__':
|
7 |
+
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
8 |
+
sys.exit(main())
|
venv/bin/pip3.13
ADDED
@@ -0,0 +1,8 @@
|
1 |
+
#!/Users/niharpalem/Desktop/vscode/resume_EandA/venv/bin/python3.13
|
2 |
+
# -*- coding: utf-8 -*-
|
3 |
+
import re
|
4 |
+
import sys
|
5 |
+
from pip._internal.cli.main import main
|
6 |
+
if __name__ == '__main__':
|
7 |
+
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
8 |
+
sys.exit(main())
|
venv/bin/pygmentize
ADDED
@@ -0,0 +1,8 @@
|
1 |
+
#!/Users/niharpalem/Desktop/vscode/resume_EandA/venv/bin/python3.13
|
2 |
+
# -*- coding: utf-8 -*-
|
3 |
+
import re
|
4 |
+
import sys
|
5 |
+
from pygments.cmdline import main
|
6 |
+
if __name__ == '__main__':
|
7 |
+
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
8 |
+
sys.exit(main())
|
venv/bin/python
ADDED
Binary file (52.6 kB). View file
|
|
venv/bin/python3
ADDED
Binary file (52.6 kB). View file
|
|
venv/bin/python3.13
ADDED
Binary file (52.6 kB). View file
|
|
venv/bin/streamlit
ADDED
@@ -0,0 +1,8 @@
|
1 |
+
#!/Users/niharpalem/Desktop/vscode/resume_EandA/venv/bin/python3.13
|
2 |
+
# -*- coding: utf-8 -*-
|
3 |
+
import re
|
4 |
+
import sys
|
5 |
+
from streamlit.web.cli import main
|
6 |
+
if __name__ == '__main__':
|
7 |
+
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
8 |
+
sys.exit(main())
|
venv/bin/streamlit.cmd
ADDED
@@ -0,0 +1,16 @@
|
1 |
+
rem Copyright (c) Streamlit Inc. (2018-2022) Snowflake Inc. (2022-2024)
|
2 |
+
rem
|
3 |
+
rem Licensed under the Apache License, Version 2.0 (the "License");
|
4 |
+
rem you may not use this file except in compliance with the License.
|
5 |
+
rem You may obtain a copy of the License at
|
6 |
+
rem
|
7 |
+
rem http://www.apache.org/licenses/LICENSE-2.0
|
8 |
+
rem
|
9 |
+
rem Unless required by applicable law or agreed to in writing, software
|
10 |
+
rem distributed under the License is distributed on an "AS IS" BASIS,
|
11 |
+
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
12 |
+
rem See the License for the specific language governing permissions and
|
13 |
+
rem limitations under the License.
|
14 |
+
|
15 |
+
@echo OFF
|
16 |
+
python -m streamlit %*
|
venv/etc/jupyter/nbconfig/notebook.d/pydeck.json
ADDED
@@ -0,0 +1,5 @@
|
1 |
+
{
|
2 |
+
"load_extensions": {
|
3 |
+
"pydeck/extension": true
|
4 |
+
}
|
5 |
+
}
|
venv/lib/python3.13/site-packages/GitPython-3.1.44.dist-info/AUTHORS
ADDED
@@ -0,0 +1,59 @@
|
1 |
+
GitPython was originally written by Michael Trier.
|
2 |
+
GitPython 0.2 was partially (re)written by Sebastian Thiel, based on 0.1.6 and git-dulwich.
|
3 |
+
|
4 |
+
Contributors are:
|
5 |
+
|
6 |
+
-Michael Trier <mtrier _at_ gmail.com>
|
7 |
+
-Alan Briolat
|
8 |
+
-Florian Apolloner <florian _at_ apolloner.eu>
|
9 |
+
-David Aguilar <davvid _at_ gmail.com>
|
10 |
+
-Jelmer Vernooij <jelmer _at_ samba.org>
|
11 |
+
-Steve Frécinaux <code _at_ istique.net>
|
12 |
+
-Kai Lautaportti <kai _at_ lautaportti.fi>
|
13 |
+
-Paul Sowden <paul _at_ idontsmoke.co.uk>
|
14 |
+
-Sebastian Thiel <byronimo _at_ gmail.com>
|
15 |
+
-Jonathan Chu <jonathan.chu _at_ me.com>
|
16 |
+
-Vincent Driessen <me _at_ nvie.com>
|
17 |
+
-Phil Elson <pelson _dot_ pub _at_ gmail.com>
|
18 |
+
-Bernard `Guyzmo` Pratz <[email protected]>
|
19 |
+
-Timothy B. Hartman <tbhartman _at_ gmail.com>
|
20 |
+
-Konstantin Popov <konstantin.popov.89 _at_ yandex.ru>
|
21 |
+
-Peter Jones <pjones _at_ redhat.com>
|
22 |
+
-Anson Mansfield <anson.mansfield _at_ gmail.com>
|
23 |
+
-Ken Odegard <ken.odegard _at_ gmail.com>
|
24 |
+
-Alexis Horgix Chotard
|
25 |
+
-Piotr Babij <piotr.babij _at_ gmail.com>
|
26 |
+
-Mikuláš Poul <mikulaspoul _at_ gmail.com>
|
27 |
+
-Charles Bouchard-Légaré <cblegare.atl _at_ ntis.ca>
|
28 |
+
-Yaroslav Halchenko <debian _at_ onerussian.com>
|
29 |
+
-Tim Swast <swast _at_ google.com>
|
30 |
+
-William Luc Ritchie
|
31 |
+
-David Host <hostdm _at_ outlook.com>
|
32 |
+
-A. Jesse Jiryu Davis <jesse _at_ emptysquare.net>
|
33 |
+
-Steven Whitman <ninloot _at_ gmail.com>
|
34 |
+
-Stefan Stancu <stefan.stancu _at_ gmail.com>
|
35 |
+
-César Izurieta <cesar _at_ caih.org>
|
36 |
+
-Arthur Milchior <arthur _at_ milchior.fr>
|
37 |
+
-Anil Khatri <anil.soccer.khatri _at_ gmail.com>
|
38 |
+
-JJ Graham <thetwoj _at_ gmail.com>
|
39 |
+
-Ben Thayer <ben _at_ benthayer.com>
|
40 |
+
-Dries Kennes <admin _at_ dries007.net>
|
41 |
+
-Pratik Anurag <panurag247365 _at_ gmail.com>
|
42 |
+
-Harmon <harmon.public _at_ gmail.com>
|
43 |
+
-Liam Beguin <liambeguin _at_ gmail.com>
|
44 |
+
-Ram Rachum <ram _at_ rachum.com>
|
45 |
+
-Alba Mendez <me _at_ alba.sh>
|
46 |
+
-Robert Westman <robert _at_ byteflux.io>
|
47 |
+
-Hugo van Kemenade
|
48 |
+
-Hiroki Tokunaga <tokusan441 _at_ gmail.com>
|
49 |
+
-Julien Mauroy <pro.julien.mauroy _at_ gmail.com>
|
50 |
+
-Patrick Gerard
|
51 |
+
-Luke Twist <[email protected]>
|
52 |
+
-Joseph Hale <me _at_ jhale.dev>
|
53 |
+
-Santos Gallegos <stsewd _at_ proton.me>
|
54 |
+
-Wenhan Zhu <wzhu.cosmos _at_ gmail.com>
|
55 |
+
-Eliah Kagan <eliah.kagan _at_ gmail.com>
|
56 |
+
-Ethan Lin <et.repositories _at_ gmail.com>
|
57 |
+
-Jonas Scharpf <jonas.scharpf _at_ checkmk.com>
|
58 |
+
|
59 |
+
Portions derived from other open source works and are clearly marked.
|
venv/lib/python3.13/site-packages/GitPython-3.1.44.dist-info/INSTALLER
ADDED
@@ -0,0 +1 @@
|
1 |
+
pip
|
venv/lib/python3.13/site-packages/GitPython-3.1.44.dist-info/LICENSE
ADDED
@@ -0,0 +1,29 @@
|
1 |
+
Copyright (C) 2008, 2009 Michael Trier and contributors
|
2 |
+
All rights reserved.
|
3 |
+
|
4 |
+
Redistribution and use in source and binary forms, with or without
|
5 |
+
modification, are permitted provided that the following conditions
|
6 |
+
are met:
|
7 |
+
|
8 |
+
* Redistributions of source code must retain the above copyright
|
9 |
+
notice, this list of conditions and the following disclaimer.
|
10 |
+
|
11 |
+
* Redistributions in binary form must reproduce the above copyright
|
12 |
+
notice, this list of conditions and the following disclaimer in the
|
13 |
+
documentation and/or other materials provided with the distribution.
|
14 |
+
|
15 |
+
* Neither the name of the GitPython project nor the names of
|
16 |
+
its contributors may be used to endorse or promote products derived
|
17 |
+
from this software without specific prior written permission.
|
18 |
+
|
19 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
20 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
21 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
22 |
+
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
23 |
+
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
24 |
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
|
25 |
+
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
26 |
+
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
27 |
+
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
28 |
+
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
29 |
+
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
venv/lib/python3.13/site-packages/GitPython-3.1.44.dist-info/METADATA
ADDED
@@ -0,0 +1,295 @@
|
1 |
+
Metadata-Version: 2.1
|
2 |
+
Name: GitPython
|
3 |
+
Version: 3.1.44
|
4 |
+
Summary: GitPython is a Python library used to interact with Git repositories
|
5 |
+
Home-page: https://github.com/gitpython-developers/GitPython
|
6 |
+
Author: Sebastian Thiel, Michael Trier
|
7 |
+
Author-email: [email protected], [email protected]
|
8 |
+
License: BSD-3-Clause
|
9 |
+
Classifier: Development Status :: 5 - Production/Stable
|
10 |
+
Classifier: Environment :: Console
|
11 |
+
Classifier: Intended Audience :: Developers
|
12 |
+
Classifier: License :: OSI Approved :: BSD License
|
13 |
+
Classifier: Operating System :: OS Independent
|
14 |
+
Classifier: Operating System :: POSIX
|
15 |
+
Classifier: Operating System :: Microsoft :: Windows
|
16 |
+
Classifier: Operating System :: MacOS :: MacOS X
|
17 |
+
Classifier: Typing :: Typed
|
18 |
+
Classifier: Programming Language :: Python
|
19 |
+
Classifier: Programming Language :: Python :: 3
|
20 |
+
Classifier: Programming Language :: Python :: 3.7
|
21 |
+
Classifier: Programming Language :: Python :: 3.8
|
22 |
+
Classifier: Programming Language :: Python :: 3.9
|
23 |
+
Classifier: Programming Language :: Python :: 3.10
|
24 |
+
Classifier: Programming Language :: Python :: 3.11
|
25 |
+
Classifier: Programming Language :: Python :: 3.12
|
26 |
+
Requires-Python: >=3.7
|
27 |
+
Description-Content-Type: text/markdown
|
28 |
+
License-File: LICENSE
|
29 |
+
License-File: AUTHORS
|
30 |
+
Requires-Dist: gitdb<5,>=4.0.1
|
31 |
+
Requires-Dist: typing-extensions>=3.7.4.3; python_version < "3.8"
|
32 |
+
Provides-Extra: test
|
33 |
+
Requires-Dist: coverage[toml]; extra == "test"
|
34 |
+
Requires-Dist: ddt!=1.4.3,>=1.1.1; extra == "test"
|
35 |
+
Requires-Dist: mock; python_version < "3.8" and extra == "test"
|
36 |
+
Requires-Dist: mypy; extra == "test"
|
37 |
+
Requires-Dist: pre-commit; extra == "test"
|
38 |
+
Requires-Dist: pytest>=7.3.1; extra == "test"
|
39 |
+
Requires-Dist: pytest-cov; extra == "test"
|
40 |
+
Requires-Dist: pytest-instafail; extra == "test"
|
41 |
+
Requires-Dist: pytest-mock; extra == "test"
|
42 |
+
Requires-Dist: pytest-sugar; extra == "test"
|
43 |
+
Requires-Dist: typing-extensions; python_version < "3.11" and extra == "test"
|
44 |
+
Provides-Extra: doc
|
45 |
+
Requires-Dist: sphinx<7.2,>=7.1.2; extra == "doc"
|
46 |
+
Requires-Dist: sphinx_rtd_theme; extra == "doc"
|
47 |
+
Requires-Dist: sphinx-autodoc-typehints; extra == "doc"
|
48 |
+
|
49 |
+

|
50 |
+
[](https://readthedocs.org/projects/gitpython/?badge=stable)
|
51 |
+
[](https://repology.org/metapackage/python:gitpython/versions)
|
52 |
+
|
53 |
+
## [Gitoxide](https://github.com/Byron/gitoxide): A peek into the future…
|
54 |
+
|
55 |
+
I started working on GitPython in 2009, back in the days when Python was 'my thing' and I had great plans with it.
|
56 |
+
Of course, back in the days, I didn't really know what I was doing and this shows in many places. Somewhat similar to
|
57 |
+
Python this happens to be 'good enough', but at the same time is deeply flawed and broken beyond repair.
|
58 |
+
|
59 |
+
By now, GitPython is widely used and I am sure there is a good reason for that, it's something to be proud of and happy about.
|
60 |
+
The community is maintaining the software and is keeping it relevant for which I am absolutely grateful. For the time to come I am happy to continue maintaining GitPython, remaining hopeful that one day it won't be needed anymore.
|
61 |
+
|
62 |
+
More than 15 years after my first meeting with 'git' I am still in excited about it, and am happy to finally have the tools and
|
63 |
+
probably the skills to scratch that itch of mine: implement `git` in a way that makes tool creation a piece of cake for most.
|
64 |
+
|
65 |
+
If you like the idea and want to learn more, please head over to [gitoxide](https://github.com/Byron/gitoxide), an
|
66 |
+
implementation of 'git' in [Rust](https://www.rust-lang.org).
|
67 |
+
|
68 |
+
*(Please note that `gitoxide` is not currently available for use in Python, and that Rust is required.)*
|
69 |
+
|
70 |
+
## GitPython
|
71 |
+
|
72 |
+
GitPython is a python library used to interact with git repositories, high-level like git-porcelain,
|
73 |
+
or low-level like git-plumbing.
|
74 |
+
|
75 |
+
It provides abstractions of git objects for easy access of repository data often backed by calling the `git`
|
76 |
+
command-line program.
|
77 |
+
|
78 |
+
### DEVELOPMENT STATUS
|
79 |
+
|
80 |
+
This project is in **maintenance mode**, which means that
|
81 |
+
|
82 |
+
- …there will be no feature development, unless these are contributed
|
83 |
+
- …there will be no bug fixes, unless they are relevant to the safety of users, or contributed
|
84 |
+
- …issues will be responded to with waiting times of up to a month
|
85 |
+
|
86 |
+
The project is open to contributions of all kinds, as well as new maintainers.
|
87 |
+
|
88 |
+
### REQUIREMENTS
|
89 |
+
|
90 |
+
GitPython needs the `git` executable to be installed on the system and available in your
|
91 |
+
`PATH` for most operations. If it is not in your `PATH`, you can help GitPython find it
|
92 |
+
by setting the `GIT_PYTHON_GIT_EXECUTABLE=<path/to/git>` environment variable.
|
93 |
+
|
94 |
+
- Git (1.7.x or newer)
|
95 |
+
- Python >= 3.7
|
96 |
+
|
97 |
+
The list of dependencies are listed in `./requirements.txt` and `./test-requirements.txt`.
|
98 |
+
The installer takes care of installing them for you.
|
99 |
+
|
100 |
+
### INSTALL
|
101 |
+
|
102 |
+
GitPython and its required package dependencies can be installed in any of the following ways, all of which should typically be done in a [virtual environment](https://docs.python.org/3/tutorial/venv.html).
|
103 |
+
|
104 |
+
#### From PyPI
|
105 |
+
|
106 |
+
To obtain and install a copy [from PyPI](https://pypi.org/project/GitPython/), run:
|
107 |
+
|
108 |
+
```sh
|
109 |
+
pip install GitPython
|
110 |
+
```
|
111 |
+
|
112 |
+
(A distribution package can also be downloaded for manual installation at [the PyPI page](https://pypi.org/project/GitPython/).)
|
113 |
+
|
114 |
+
#### From downloaded source code
|
115 |
+
|
116 |
+
If you have downloaded the source code, run this from inside the unpacked `GitPython` directory:
|
117 |
+
|
118 |
+
```sh
|
119 |
+
pip install .
|
120 |
+
```
|
121 |
+
|
122 |
+
#### By cloning the source code repository
|
123 |
+
|
124 |
+
To clone the [the GitHub repository](https://github.com/gitpython-developers/GitPython) from source to work on the code, you can do it like so:
|
125 |
+
|
126 |
+
```sh
|
127 |
+
git clone https://github.com/gitpython-developers/GitPython
|
128 |
+
cd GitPython
|
129 |
+
./init-tests-after-clone.sh
|
130 |
+
```
|
131 |
+
|
132 |
+
On Windows, `./init-tests-after-clone.sh` can be run in a Git Bash shell.
|
133 |
+
|
134 |
+
If you are cloning [your own fork](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/working-with-forks/about-forks), then replace the above `git clone` command with one that gives the URL of your fork. Or use this [`gh`](https://cli.github.com/) command (assuming you have `gh` and your fork is called `GitPython`):
|
135 |
+
|
136 |
+
```sh
|
137 |
+
gh repo clone GitPython
|
138 |
+
```
|
139 |
+
|
140 |
+
Having cloned the repo, create and activate your [virtual environment](https://docs.python.org/3/tutorial/venv.html).
|
141 |
+
|
142 |
+
Then make an [editable install](https://pip.pypa.io/en/stable/topics/local-project-installs/#editable-installs):
|
143 |
+
|
144 |
+
```sh
|
145 |
+
pip install -e ".[test]"
|
146 |
+
```
|
147 |
+
|
148 |
+
In the less common case that you do not want to install test dependencies, `pip install -e .` can be used instead.
|
149 |
+
|
150 |
+
#### With editable *dependencies* (not preferred, and rarely needed)
|
151 |
+
|
152 |
+
In rare cases, you may want to work on GitPython and one or both of its [gitdb](https://github.com/gitpython-developers/gitdb) and [smmap](https://github.com/gitpython-developers/smmap) dependencies at the same time, with changes in your local working copy of gitdb or smmap immediately reflected in the behavior of your local working copy of GitPython. This can be done by making editable installations of those dependencies in the same virtual environment where you install GitPython.
|
153 |
+
|
154 |
+
If you want to do that *and* you want the versions in GitPython's git submodules to be used, then pass `-e git/ext/gitdb` and/or `-e git/ext/gitdb/gitdb/ext/smmap` to `pip install`. This can be done in any order, and in separate `pip install` commands or the same one, so long as `-e` appears before *each* path. For example, you can install GitPython, gitdb, and smmap editably in the currently active virtual environment this way:
|
155 |
+
|
156 |
+
```sh
|
157 |
+
pip install -e ".[test]" -e git/ext/gitdb -e git/ext/gitdb/gitdb/ext/smmap
|
158 |
+
```
|
159 |
+
|
160 |
+
The submodules must have been cloned for that to work, but that will already be the case if you have run `./init-tests-after-clone.sh`. You can use `pip list` to check which packages are installed editably and which are installed normally.
|
161 |
+
|
162 |
+
To reiterate, this approach should only rarely be used. For most development it is preferable to allow the gitdb and smmap dependencices to be retrieved automatically from PyPI in their latest stable packaged versions.
|
163 |
+
|
164 |
+
### Limitations
|
165 |
+
|
166 |
+
#### Leakage of System Resources
|
167 |
+
|
168 |
+
GitPython is not suited for long-running processes (like daemons) as it tends to
|
169 |
+
leak system resources. It was written in a time where destructors (as implemented
|
170 |
+
in the `__del__` method) still ran deterministically.
|
171 |
+
|
172 |
+
In case you still want to use it in such a context, you will want to search the
|
173 |
+
codebase for `__del__` implementations and call these yourself when you see fit.
|
174 |
+
|
175 |
+
Another way assure proper cleanup of resources is to factor out GitPython into a
|
176 |
+
separate process which can be dropped periodically.
|
177 |
+
|
178 |
+
#### Windows support
|
179 |
+
|
180 |
+
See [Issue #525](https://github.com/gitpython-developers/GitPython/issues/525).
|
181 |
+
|
182 |
+
### RUNNING TESTS
|
183 |
+
|
184 |
+
_Important_: Right after cloning this repository, please be sure to have executed
|
185 |
+
the `./init-tests-after-clone.sh` script in the repository root. Otherwise
|
186 |
+
you will encounter test failures.
|
187 |
+
|
188 |
+
#### Install test dependencies
|
189 |
+
|
190 |
+
Ensure testing libraries are installed. This is taken care of already if you installed with:
|
191 |
+
|
192 |
+
```sh
|
193 |
+
pip install -e ".[test]"
|
194 |
+
```
|
195 |
+
|
196 |
+
If you had installed with a command like `pip install -e .` instead, you can still run
|
197 |
+
the above command to add the testing dependencies.
|
198 |
+
|
199 |
+
#### Test commands
|
200 |
+
|
201 |
+
To test, run:
|
202 |
+
|
203 |
+
```sh
|
204 |
+
pytest
|
205 |
+
```
|
206 |
+
|
207 |
+
To lint, and apply some linting fixes as well as automatic code formatting, run:
|
208 |
+
|
209 |
+
```sh
|
210 |
+
pre-commit run --all-files
|
211 |
+
```
|
212 |
+
|
213 |
+
This includes the linting and autoformatting done by Ruff, as well as some other checks.
|
214 |
+
|
215 |
+
To typecheck, run:
|
216 |
+
|
217 |
+
```sh
|
218 |
+
mypy
|
219 |
+
```
|
220 |
+
|
221 |
+
#### CI (and tox)
|
222 |
+
|
223 |
+
Style and formatting checks, and running tests on all the different supported Python versions, will be performed:
|
224 |
+
|
225 |
+
- Upon submitting a pull request.
|
226 |
+
- On each push, *if* you have a fork with GitHub Actions enabled.
|
227 |
+
- Locally, if you run [`tox`](https://tox.wiki/) (this skips any Python versions you don't have installed).
|
228 |
+
|
229 |
+
#### Configuration files
|
230 |
+
|
231 |
+
Specific tools are all configured in the `./pyproject.toml` file:
|
232 |
+
|
233 |
+
- `pytest` (test runner)
|
234 |
+
- `coverage.py` (code coverage)
|
235 |
+
- `ruff` (linter and formatter)
|
236 |
+
- `mypy` (type checker)
|
237 |
+
|
238 |
+
Orchestration tools:
|
239 |
+
|
240 |
+
- Configuration for `pre-commit` is in the `./.pre-commit-config.yaml` file.
|
241 |
+
- Configuration for `tox` is in `./tox.ini`.
|
242 |
+
- Configuration for GitHub Actions (CI) is in files inside `./.github/workflows/`.
|
243 |
+
|
244 |
+
### Contributions
|
245 |
+
|
246 |
+
Please have a look at the [contributions file][contributing].
|
247 |
+
|
248 |
+
### INFRASTRUCTURE
|
249 |
+
|
250 |
+
- [User Documentation](http://gitpython.readthedocs.org)
|
251 |
+
- [Questions and Answers](http://stackexchange.com/filters/167317/gitpython)
|
252 |
+
- Please post on Stack Overflow and use the `gitpython` tag
|
253 |
+
- [Issue Tracker](https://github.com/gitpython-developers/GitPython/issues)
|
254 |
+
- Post reproducible bugs and feature requests as a new issue.
|
255 |
+
Please be sure to provide the following information if posting bugs:
|
256 |
+
- GitPython version (e.g. `import git; git.__version__`)
|
257 |
+
- Python version (e.g. `python --version`)
|
258 |
+
- The encountered stack-trace, if applicable
|
259 |
+
- Enough information to allow reproducing the issue
|
260 |
+
|
261 |
+
### How to make a new release
|
262 |
+
|
263 |
+
1. Update/verify the **version** in the `VERSION` file.
|
264 |
+
2. Update/verify that the `doc/source/changes.rst` changelog file was updated. It should include a link to the forthcoming release page: `https://github.com/gitpython-developers/GitPython/releases/tag/<version>`
|
265 |
+
3. Commit everything.
|
266 |
+
4. Run `git tag -s <version>` to tag the version in Git.
|
267 |
+
5. _Optionally_ create and activate a [virtual environment](https://packaging.python.org/en/latest/guides/installing-using-pip-and-virtual-environments/#creating-a-virtual-environment). (Then the next step can install `build` and `twine`.)
|
268 |
+
6. Run `make release`.
|
269 |
+
7. Go to [GitHub Releases](https://github.com/gitpython-developers/GitPython/releases) and publish a new one with the recently pushed tag. Generate the changelog.
|
270 |
+
|
### Projects using GitPython

- [PyDriller](https://github.com/ishepard/pydriller)
- [Kivy Designer](https://github.com/kivy/kivy-designer)
- [Prowl](https://github.com/nettitude/Prowl)
- [Python Taint](https://github.com/python-security/pyt)
- [Buster](https://github.com/axitkhurana/buster)
- [git-ftp](https://github.com/ezyang/git-ftp)
- [Git-Pandas](https://github.com/wdm0006/git-pandas)
- [PyGitUp](https://github.com/msiemens/PyGitUp)
- [PyJFuzz](https://github.com/mseclab/PyJFuzz)
- [Loki](https://github.com/Neo23x0/Loki)
- [Omniwallet](https://github.com/OmniLayer/omniwallet)
- [GitViper](https://github.com/BeayemX/GitViper)
- [Git Gud](https://github.com/bthayer2365/git-gud)

### LICENSE

[3-Clause BSD License](https://opensource.org/license/bsd-3-clause/), also known as the New BSD License. See the [LICENSE file][license].

One file exclusively used for fuzz testing is subject to [a separate license, detailed here](./fuzzing/README.md#license).
This file is not included in the wheel or sdist packages published by the maintainers of GitPython.

[contributing]: https://github.com/gitpython-developers/GitPython/blob/main/CONTRIBUTING.md
[license]: https://github.com/gitpython-developers/GitPython/blob/main/LICENSE
venv/lib/python3.13/site-packages/GitPython-3.1.44.dist-info/RECORD
ADDED
@@ -0,0 +1,82 @@
GitPython-3.1.44.dist-info/AUTHORS,sha256=tZ9LuyBks2V2HKTPK7kCmtd9Guu_LyU1oZHvU0NiAok,2334
GitPython-3.1.44.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
GitPython-3.1.44.dist-info/LICENSE,sha256=hvyUwyGpr7wRUUcTURuv3tIl8lEA3MD3NQ6CvCMbi-s,1503
GitPython-3.1.44.dist-info/METADATA,sha256=0O_Fr2Y7A-DlPYhlbSxGjblBC2mWkw3USNUhyL80Ip8,13245
GitPython-3.1.44.dist-info/RECORD,,
GitPython-3.1.44.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
GitPython-3.1.44.dist-info/top_level.txt,sha256=0hzDuIp8obv624V3GmbqsagBWkk8ohtGU-Bc1PmTT0o,4
git/__init__.py,sha256=nkQImgv-bWdiZOFDjzN-gbt93FoRHD0nY6_t9LQxy4Y,8899
git/__pycache__/__init__.cpython-313.pyc,,
git/__pycache__/cmd.cpython-313.pyc,,
git/__pycache__/compat.cpython-313.pyc,,
git/__pycache__/config.cpython-313.pyc,,
git/__pycache__/db.cpython-313.pyc,,
git/__pycache__/diff.cpython-313.pyc,,
git/__pycache__/exc.cpython-313.pyc,,
git/__pycache__/remote.cpython-313.pyc,,
git/__pycache__/types.cpython-313.pyc,,
git/__pycache__/util.cpython-313.pyc,,
git/cmd.py,sha256=QwiaBy0mFbi9xjRKhRgUVK-_-K6xVdFqh9l0cxPqPSc,67724
git/compat.py,sha256=y1E6y6O2q5r8clSlr8ZNmuIWG9nmHuehQEsVsmBffs8,4526
git/config.py,sha256=vTUlK6d8ORqFqjOv4Vbq_Hm-5mp-jOAt1dkq0IdzJ3U,34933
git/db.py,sha256=vIW9uWSbqu99zbuU2ZDmOhVOv1UPTmxrnqiCtRHCfjE,2368
git/diff.py,sha256=wmpMCIdMiVOqreGVPOGYyO4gFboGOAicyrvvI7PPjEg,27095
git/exc.py,sha256=Gc7g1pHpn8OmTse30NHmJVsBJ2CYH8LxaR8y8UA3lIM,7119
git/index/__init__.py,sha256=i-Nqb8Lufp9aFbmxpQBORmmQnjEVVM1Pn58fsQkyGgQ,406
git/index/__pycache__/__init__.cpython-313.pyc,,
git/index/__pycache__/base.cpython-313.pyc,,
git/index/__pycache__/fun.cpython-313.pyc,,
git/index/__pycache__/typ.cpython-313.pyc,,
git/index/__pycache__/util.cpython-313.pyc,,
git/index/base.py,sha256=nDD7XVLNbgBKpJMrrTVyHBy6NVLWgDkk7oUw6ZOegPc,60808
git/index/fun.py,sha256=37cA3DBC9vpAnSVu5TGA072SnoF5XZOkOukExwlejHs,16736
git/index/typ.py,sha256=uuKNwitUw83FhVaLSwo4pY7PHDQudtZTLJrLGym4jcI,6570
git/index/util.py,sha256=fULi7GPG-MvprKrRCD5c15GNdzku_1E38We0d97WB3A,3659
git/objects/__init__.py,sha256=O6ZL_olX7e5-8iIbKviRPkVSJxN37WA-EC0q9d48U5Y,637
git/objects/__pycache__/__init__.cpython-313.pyc,,
git/objects/__pycache__/base.cpython-313.pyc,,
git/objects/__pycache__/blob.cpython-313.pyc,,
git/objects/__pycache__/commit.cpython-313.pyc,,
git/objects/__pycache__/fun.cpython-313.pyc,,
git/objects/__pycache__/tag.cpython-313.pyc,,
git/objects/__pycache__/tree.cpython-313.pyc,,
git/objects/__pycache__/util.cpython-313.pyc,,
git/objects/base.py,sha256=0dqNkSRVH0mk0-7ZKIkGBK7iNYrzLTVxwQFUd6CagsE,10277
git/objects/blob.py,sha256=zwwq0KfOMYeP5J2tW5CQatoLyeqFRlfkxP1Vwx1h07s,1215
git/objects/commit.py,sha256=GH1_83C9t7RGTukwozTHDgvxYQPRjTHhPDkXJyBbJyo,30553
git/objects/fun.py,sha256=B4jCqhAjm6Hl79GK58FPzW1H9K6Wc7Tx0rssyWmAcEE,8935
git/objects/submodule/__init__.py,sha256=6xySp767LVz3UylWgUalntS_nGXRuVzXxDuFAv_Wc2c,303
git/objects/submodule/__pycache__/__init__.cpython-313.pyc,,
git/objects/submodule/__pycache__/base.cpython-313.pyc,,
git/objects/submodule/__pycache__/root.cpython-313.pyc,,
git/objects/submodule/__pycache__/util.cpython-313.pyc,,
git/objects/submodule/base.py,sha256=MQ-2xV8JznGwy2hLQv1aeQNgAkhBhgc5tdtClFL3DmE,63901
git/objects/submodule/root.py,sha256=5eTtYNHasqdPq6q0oDCPr7IaO6uAHL3b4DxMoiO2LhE,20246
git/objects/submodule/util.py,sha256=sQqAYaiSJdFkZa9NlAuK_wTsMNiS-kkQnQjvIoJtc_o,3509
git/objects/tag.py,sha256=jAGESnpmTEv-dLakPzheT5ILZFFArcItnXYqfxfDrgc,4441
git/objects/tree.py,sha256=jJH888SHiP4dGzE-ra1yenQOyya_0C_MkHr06c1gHpM,13849
git/objects/util.py,sha256=Nlza4zLgdPmr_Yasyvvs6c1rKtW_wMxI6wDmQpQ3ufw,23846
git/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
git/refs/__init__.py,sha256=DWlJNnsx-4jM_E-VycbP-FZUdn6iWhjnH_uZ_pZXBro,509
git/refs/__pycache__/__init__.cpython-313.pyc,,
git/refs/__pycache__/head.cpython-313.pyc,,
git/refs/__pycache__/log.cpython-313.pyc,,
git/refs/__pycache__/reference.cpython-313.pyc,,
git/refs/__pycache__/remote.cpython-313.pyc,,
git/refs/__pycache__/symbolic.cpython-313.pyc,,
git/refs/__pycache__/tag.cpython-313.pyc,,
git/refs/head.py,sha256=SGa3N301HfAi79X6UR5Mcg7mO9TnCH3Bk549kHlJVaQ,10513
git/refs/log.py,sha256=kXiuAgTo1DIuM_BfbDUk9gQ0YO-mutIMVdHv1_ES90o,12493
git/refs/reference.py,sha256=l6mhF4YLSEwtjz6b9PpOQH-fkng7EYWMaJhkjn-2jXA,5630
git/refs/remote.py,sha256=WwqV9T7BbYf3F_WZNUQivu9xktIIKGklCjDpwQrhD-A,2806
git/refs/symbolic.py,sha256=c8zOwaqzcg-J-rGrpuWdvh8zwMvSUqAHghd4vJoYG_s,34552
git/refs/tag.py,sha256=kgzV2vhpL4FD2TqHb0BJuMRAHgAvJF-TcoyWlaB-djQ,5010
git/remote.py,sha256=pYn9dAlz-QwvNMWXD1M57pMPQitthOM86qTRK_cpTqU,46786
git/repo/__init__.py,sha256=CILSVH36fX_WxVFSjD9o1WF5LgsNedPiJvSngKZqfVU,210
git/repo/__pycache__/__init__.cpython-313.pyc,,
git/repo/__pycache__/base.cpython-313.pyc,,
git/repo/__pycache__/fun.cpython-313.pyc,,
git/repo/base.py,sha256=0GU6nKNdT8SYjDI5Y5DeZ1zCEX3tHeq1VW2MSpne05g,59891
git/repo/fun.py,sha256=HSGC0-rqeKKx9fDg7JyQyMZgIwUWn-FnSZR_gRGpG-E,13573
git/types.py,sha256=MQzIDEOnoueXGsAJF_0MgUc_osH7Eu0Sw3DQofYzCVE,10272
git/util.py,sha256=2uAv34zZ_827-zJ3-D5ACrVH-4Q4EO_KLUTH23zi2AI,43770
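Each row of a `RECORD` file is a CSV triple of path, `sha256=<urlsafe-base64 digest, unpadded>`, and size in bytes; the hash and size are left empty for the `RECORD` file itself and for `.pyc` files. A rough verification sketch (an illustration only, assuming it is run from this repository's root so the bundled venv path below resolves):

```python
# Sketch only: verify the hashes and sizes recorded for the bundled GitPython install.
import base64
import csv
import hashlib
from pathlib import Path

site = Path("venv/lib/python3.13/site-packages")
record = site / "GitPython-3.1.44.dist-info" / "RECORD"

with record.open(newline="") as fh:
    for path, digest, size in csv.reader(fh):
        if not digest:  # RECORD itself and *.pyc entries carry no hash
            continue
        algo, _, expected = digest.partition("=")
        data = (site / path).read_bytes()
        actual = base64.urlsafe_b64encode(hashlib.new(algo, data).digest()).rstrip(b"=").decode()
        assert actual == expected and len(data) == int(size), path
```
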
venv/lib/python3.13/site-packages/GitPython-3.1.44.dist-info/WHEEL
ADDED
@@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: setuptools (75.6.0)
Root-Is-Purelib: true
Tag: py3-none-any
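`Root-Is-Purelib: true` together with the `py3-none-any` tag marks this as a pure-Python wheel, unlike the platform-specific `cp313-cp313-macosx_11_0_arm64` MarkupSafe wheel further down. As a hedged illustration (it relies on the third-party `packaging` library, which is not part of this repo's stated requirements), a tag can be checked against the running interpreter like so:

```python
# Illustrative check: is a wheel with a given tag installable on the current interpreter?
from packaging.tags import parse_tag, sys_tags

supported = set(sys_tags())
print(any(tag in supported for tag in parse_tag("py3-none-any")))                   # pure Python: broadly compatible
print(any(tag in supported for tag in parse_tag("cp313-cp313-macosx_11_0_arm64")))  # only CPython 3.13 on macOS arm64
```
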
venv/lib/python3.13/site-packages/GitPython-3.1.44.dist-info/top_level.txt
ADDED
@@ -0,0 +1 @@
git
venv/lib/python3.13/site-packages/MarkupSafe-3.0.2.dist-info/INSTALLER
ADDED
@@ -0,0 +1 @@
pip
venv/lib/python3.13/site-packages/MarkupSafe-3.0.2.dist-info/LICENSE.txt
ADDED
@@ -0,0 +1,28 @@
Copyright 2010 Pallets

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

1.  Redistributions of source code must retain the above copyright
    notice, this list of conditions and the following disclaimer.

2.  Redistributions in binary form must reproduce the above copyright
    notice, this list of conditions and the following disclaimer in the
    documentation and/or other materials provided with the distribution.

3.  Neither the name of the copyright holder nor the names of its
    contributors may be used to endorse or promote products derived from
    this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
venv/lib/python3.13/site-packages/MarkupSafe-3.0.2.dist-info/METADATA
ADDED
@@ -0,0 +1,92 @@
Metadata-Version: 2.1
Name: MarkupSafe
Version: 3.0.2
Summary: Safely add untrusted strings to HTML/XML markup.
Maintainer-email: Pallets <[email protected]>
License: Copyright 2010 Pallets

        Redistribution and use in source and binary forms, with or without
        modification, are permitted provided that the following conditions are
        met:

        1.  Redistributions of source code must retain the above copyright
            notice, this list of conditions and the following disclaimer.

        2.  Redistributions in binary form must reproduce the above copyright
            notice, this list of conditions and the following disclaimer in the
            documentation and/or other materials provided with the distribution.

        3.  Neither the name of the copyright holder nor the names of its
            contributors may be used to endorse or promote products derived from
            this software without specific prior written permission.

        THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
        "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
        LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
        PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
        HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
        SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
        TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
        PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
        LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
        NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
        SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

Project-URL: Donate, https://palletsprojects.com/donate
Project-URL: Documentation, https://markupsafe.palletsprojects.com/
Project-URL: Changes, https://markupsafe.palletsprojects.com/changes/
Project-URL: Source, https://github.com/pallets/markupsafe/
Project-URL: Chat, https://discord.gg/pallets
Classifier: Development Status :: 5 - Production/Stable
Classifier: Environment :: Web Environment
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: BSD License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
Classifier: Topic :: Text Processing :: Markup :: HTML
Classifier: Typing :: Typed
Requires-Python: >=3.9
Description-Content-Type: text/markdown
License-File: LICENSE.txt

# MarkupSafe

MarkupSafe implements a text object that escapes characters so it is
safe to use in HTML and XML. Characters that have special meanings are
replaced so that they display as the actual characters. This mitigates
injection attacks, meaning untrusted user input can safely be displayed
on a page.

## Examples

```pycon
>>> from markupsafe import Markup, escape

>>> # escape replaces special characters and wraps in Markup
>>> escape("<script>alert(document.cookie);</script>")
Markup('&lt;script&gt;alert(document.cookie);&lt;/script&gt;')

>>> # wrap in Markup to mark text "safe" and prevent escaping
>>> Markup("<strong>Hello</strong>")
Markup('<strong>Hello</strong>')

>>> escape(Markup("<strong>Hello</strong>"))
Markup('<strong>Hello</strong>')

>>> # Markup is a str subclass
>>> # methods and operators escape their arguments
>>> template = Markup("Hello <em>{name}</em>")
>>> template.format(name='"World"')
Markup('Hello <em>&#34;World&#34;</em>')
```

## Donate

The Pallets organization develops and supports MarkupSafe and other
popular packages. In order to grow the community of contributors and
users, and allow the maintainers to devote more time to the projects,
[please donate today][].

[please donate today]: https://palletsprojects.com/donate
venv/lib/python3.13/site-packages/MarkupSafe-3.0.2.dist-info/RECORD
ADDED
@@ -0,0 +1,14 @@
MarkupSafe-3.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
MarkupSafe-3.0.2.dist-info/LICENSE.txt,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475
MarkupSafe-3.0.2.dist-info/METADATA,sha256=aAwbZhSmXdfFuMM-rEHpeiHRkBOGESyVLJIuwzHP-nw,3975
MarkupSafe-3.0.2.dist-info/RECORD,,
MarkupSafe-3.0.2.dist-info/WHEEL,sha256=EhaGmhgTZV8uqhZxBmQmxqlBexDOCFpUXsFLjK8lF9g,109
MarkupSafe-3.0.2.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11
markupsafe/__init__.py,sha256=sr-U6_27DfaSrj5jnHYxWN-pvhM27sjlDplMDPZKm7k,13214
markupsafe/__pycache__/__init__.cpython-313.pyc,,
markupsafe/__pycache__/_native.cpython-313.pyc,,
markupsafe/_native.py,sha256=hSLs8Jmz5aqayuengJJ3kdT5PwNpBWpKrmQSdipndC8,210
markupsafe/_speedups.c,sha256=O7XulmTo-epI6n2FtMVOrJXl8EAaIwD2iNYmBI5SEoQ,4149
markupsafe/_speedups.cpython-313-darwin.so,sha256=zqa2NWhnDkGCuJVPBpLRKADVKQXphH3cRvm6rY8Hvds,50624
markupsafe/_speedups.pyi,sha256=ENd1bYe7gbBUf2ywyYWOGUpnXOHNJ-cgTNqetlW8h5k,41
markupsafe/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
venv/lib/python3.13/site-packages/MarkupSafe-3.0.2.dist-info/WHEEL
ADDED
@@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: setuptools (75.2.0)
Root-Is-Purelib: false
Tag: cp313-cp313-macosx_11_0_arm64