diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..592400a --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,59 @@ + + +exclude: \.(csv|sql)$|devcontainer\.json +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.0.1 + hooks: + # - id: check-added-large-files + - id: check-case-conflict + - id: check-json + - id: check-merge-conflict + - id: check-yaml + args: [--unsafe] + - id: end-of-file-fixer + exclude: "__snapshots__/" + - id: fix-encoding-pragma + args: [--remove] + - id: mixed-line-ending + - id: trailing-whitespace + args: [--markdown-linebreak-ext=md] + exclude: "__snapshots__/" + + - repo: https://github.com/astral-sh/ruff-pre-commit + # Ruff version. + rev: v0.4.9 + hooks: + # Run the linter. + - id: ruff + args: [--fix] + # Run the formatter. + - id: ruff-format + + - repo: https://github.com/pre-commit/mirrors-mypy + rev: "v1.8.0" + hooks: + - id: mypy + exclude: migrations/|commands/|scripts/|sandbox/|samples|sdk + additional_dependencies: + - "pygithub" + - "pytest" + - "syrupy" + - "typer" + - "jinja2" + - "gitpython" + - "types-PyYAML" + - "devtools" + args: + [ + --pretty, + --show-error-codes, + --implicit-optional, + --follow-imports=silent, + --warn-redundant-casts, + --warn-unused-ignores, + --disallow-any-generics, + --check-untyped-defs, + --no-implicit-reexport, + --disallow-untyped-defs, + ] diff --git a/README.md b/README.md index 5be7caa..18ad701 100644 --- a/README.md +++ b/README.md @@ -1 +1 @@ -# auto-detect-exceptions \ No newline at end of file +# auto-detect-exceptions diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..250ea12 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,448 @@ +# This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand. + +[[package]] +name = "black" +version = "25.1.0" +description = "The uncompromising code formatter." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32"}, + {file = "black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da"}, + {file = "black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7"}, + {file = "black-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9"}, + {file = "black-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0"}, + {file = "black-25.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299"}, + {file = "black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096"}, + {file = "black-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2"}, + {file = "black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b"}, + {file = "black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc"}, + {file = "black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f"}, + {file = "black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba"}, + {file = "black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f"}, + {file = "black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3"}, + {file = "black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171"}, + {file = "black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18"}, + {file = "black-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1ee0a0c330f7b5130ce0caed9936a904793576ef4d2b98c40835d6a65afa6a0"}, + {file = "black-25.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3df5f1bf91d36002b0a75389ca8663510cf0531cca8aa5c1ef695b46d98655f"}, + {file = "black-25.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9e6827d563a2c820772b32ce8a42828dc6790f095f441beef18f96aa6f8294e"}, + {file = "black-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:bacabb307dca5ebaf9c118d2d2f6903da0d62c9faa82bd21a33eecc319559355"}, + {file = "black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717"}, + {file = "black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = 
"python_version < \"3.11\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.10)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "cfgv" +version = "3.4.0" +description = "Validate configuration and produce human readable error messages." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, +] + +[[package]] +name = "click" +version = "8.1.8" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, + {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev"] +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] +markers = {main = "platform_system == \"Windows\"", dev = "sys_platform == \"win32\""} + +[[package]] +name = "distlib" +version = "0.3.8" +description = "Distribution utilities" +optional = false +python-versions = "*" +groups = ["dev"] +files = [ + {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, + {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.2" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +markers = "python_version < \"3.11\"" +files = [ + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "filelock" +version = "3.15.4" +description = "A platform independent file lock." 
+optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, + {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] +typing = ["typing-extensions (>=4.8)"] + +[[package]] +name = "identify" +version = "2.6.0" +description = "File identification library for Python" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "identify-2.6.0-py2.py3-none-any.whl", hash = "sha256:e79ae4406387a9d300332b5fd366d8994f1525e8414984e1a59e058b2eda2dd0"}, + {file = "identify-2.6.0.tar.gz", hash = "sha256:cb171c685bdc31bcc4c1734698736a7d5b6c8bf2e0c15117f4d469c8640ae5cf"}, +] + +[package.extras] +license = ["ukkonen"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "libcst" +version = "1.6.0" +description = "A concrete syntax tree with AST-like properties for Python 3.0 through 3.13 programs." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "libcst-1.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2f02d0da6dfbad44e6ec4d1e5791e17afe95d9fe89bce4374bf109fd9c103a50"}, + {file = "libcst-1.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48406225378ee9208edb1e5a10451bea810262473af1a2f2473737fd16d34e3a"}, + {file = "libcst-1.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8bf59a21e9968dc4e7c301fac660bf54bc7d4dcadc0b1abf31b1cac34e800555"}, + {file = "libcst-1.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d65550ac686bff9395398afacbc88fe812363703a4161108e8a6db066d30b96e"}, + {file = "libcst-1.6.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5ac6d68364031f0b554d8920a69b33f25ec6ef351fa31b4e8f3676abb729ce36"}, + {file = "libcst-1.6.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0c0fb2f7b74605832cc38d79e9d104f92a8aaeec7bf8f2759b20c5ba3786a321"}, + {file = "libcst-1.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:1bd11863889b630fe41543b4eb5e2dd445447a7f89e6b58229e83c9e52a74942"}, + {file = "libcst-1.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a9e71a046b4a91950125967f5ee67389f25a2511103e5595508f0591a5f50bc0"}, + {file = "libcst-1.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df3f452e074893dfad7746a041caeb3cde75bd9fbca4ea7b223012e112d1da8c"}, + {file = "libcst-1.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31e45f88d4a9a8e5b690ed14a564fcbace14b10f5e7b6797d6d97f4226b395da"}, + {file = "libcst-1.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1bd00399d20bf93590b6f02647f8be08e2b730e050e6b7360f669254e69c98f5"}, + {file = "libcst-1.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d25132f24edc24895082589645dbb8972c0eff6c9716ff71932fa72643d7c74f"}, + {file = "libcst-1.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:38f3f25d4f5d8713cdb6a7bd41d75299de3c2416b9890a34d9b05417b8e64c1d"}, + {file = "libcst-1.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:91242ccbae6e7a070b33ebe03d3677c54bf678653538fbaa89597a59e4a13b2d"}, + {file = "libcst-1.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cd2b28688dabf0f7a166b47ab1c7d5c0b6ef8c9a05ad932618471a33fe591a4a"}, + {file = "libcst-1.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a12a4766ce5874ccb31a1cc095cff47e2fb35755954965fe77458d9e5b361a8"}, + {file = "libcst-1.6.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfcd78a5e775f155054ed50d047a260cd23f0f6a89ef2a57e10bdb9c697680b8"}, + {file = "libcst-1.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5786240358b122ad901bb0b7e6b7467085b2317333233d7c7d7cac46388fbd77"}, + {file = "libcst-1.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c527472093b5b64ffa65d33c472da38952827abbca18c786d559d6d6122bc891"}, + {file = "libcst-1.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:63a8893dfc344b9b08bfaf4e433b16a7e2e9361f8362fa73eaecc4d379c328ba"}, + {file = "libcst-1.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:4cd011fcd79b76be216440ec296057780223674bc2566662c4bc50d3c5ecd58e"}, + {file = "libcst-1.6.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:96506807dc01c9efcea8ab57d9ea18fdc87b85514cc8ee2f8568fab6df861f02"}, + {file = "libcst-1.6.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dac722aade8796a1e78662c3ed424f0ab9f1dc0e8fdf3088610354cdd709e53f"}, + {file = "libcst-1.6.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b8370d0f7092a17b7fcda0e1539d0162cf35a0c19af94842b09c9dddc382acd"}, + {file = "libcst-1.6.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e4fcd791cab0fe8287b6edd0d78512b6475b87d906562a5d2d0999cb6d23b8d"}, + {file = "libcst-1.6.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3fb953fc0155532f366ff40f6a23f191250134d6928e02074ae4eb3531fa6c30"}, + {file = "libcst-1.6.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2f3c85602e5a6d3aec0a8fc74230363f943004d7c2b2a6a1c09b320b61692241"}, + {file = "libcst-1.6.0-cp313-cp313-win_amd64.whl", hash = "sha256:c4486921bebd33d67bbbd605aff8bfaefd2d13dc73c20c1fde2fb245880b7fd6"}, + {file = "libcst-1.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b3d274115d134a550fe8a0b38780a28a659d4a35ac6068c7c92fffe6661b519c"}, + {file = "libcst-1.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d45513f6cd3dbb2a80cf21a53bc6e6e560414edea17c474c784100e10aebe921"}, + {file = "libcst-1.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8c70a124d7a7d326abdc9a6261013c57d36f21c6c6370de5dd3e6a040c4ee5e"}, + {file = "libcst-1.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdc95df61838d708adb37e18af1615491f6cac59557fd11077664dd956fe4528"}, + {file = "libcst-1.6.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:05c32de72553cb93ff606c7d2421ce1eab1f0740c8c4b715444e2ae42f42b1b6"}, + {file = 
"libcst-1.6.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:69b705f5b1faa66f115ede52a970d7613d3a8fb988834f853f7fb46870a041d2"}, + {file = "libcst-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:984512829a80f963bfc1803342219a4264a8d4206df0a30eae9bce921357a938"}, + {file = "libcst-1.6.0.tar.gz", hash = "sha256:e80ecdbe3fa43b3793cae8fa0b07a985bd9a693edbe6e9d076f5422ecadbf0db"}, +] + +[package.dependencies] +pyyaml = ">=5.2" + +[package.extras] +dev = ["Sphinx (>=5.1.1)", "black (==24.8.0)", "build (>=0.10.0)", "coverage[toml] (>=4.5.4)", "fixit (==2.1.0)", "flake8 (==7.1.1)", "hypothesis (>=4.36.0)", "hypothesmith (>=0.0.4)", "jinja2 (==3.1.5)", "jupyter (>=1.0.0)", "maturin (>=1.7.0,<1.8)", "nbsphinx (>=0.4.2)", "prompt-toolkit (>=2.0.9)", "pyre-check (==0.9.18)", "setuptools-rust (>=1.5.2)", "setuptools_scm (>=6.0.1)", "slotscheck (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)", "ufmt (==2.8.0)", "usort (==1.0.8.post1)"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +groups = ["main"] +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +description = "Node.js virtual environment builder" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["dev"] +files = [ + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, +] + +[[package]] +name = "packaging" +version = "24.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "platformdirs" +version = "4.2.2" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
+optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, + {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +type = ["mypy (>=1.8)"] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pre-commit" +version = "3.7.1" +description = "A framework for managing and maintaining multi-language pre-commit hooks." +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pre_commit-3.7.1-py2.py3-none-any.whl", hash = "sha256:fae36fd1d7ad7d6a5a1c0b0d5adb2ed1a3bda5a21bf6c3e5372073d7a11cd4c5"}, + {file = "pre_commit-3.7.1.tar.gz", hash = "sha256:8ca3ad567bc78a4972a3f1a477e94a79d4597e8140a6e0b651c5e33899c3654a"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + +[[package]] +name = "pytest" +version = "8.2.2" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pytest-8.2.2-py3-none-any.whl", hash = "sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343"}, + {file = "pytest-8.2.2.tar.gz", hash = "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.5,<2.0" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +groups = ["main", "dev"] +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", 
hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +groups = ["main", "dev"] +markers = "python_version < \"3.11\"" +files = [ + 
{file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version < \"3.11\"" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "virtualenv" +version = "20.26.3" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"}, + {file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + +[metadata] +lock-version = "2.1" +python-versions = "^3.10" +content-hash = "405f20ec70c3102d2f60a502f9edd15e14011fbce5503bd0b1325c62a3ec1145" diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..fd6a4ed --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,30 @@ +[project] +name = "auto-detect-exceptions" +version = "0.1.0" +description = "" +authors = [ + { name = "lucemia", email = "lucemia@gmail.com" }, +] +readme = "README.md" + +[tool.poetry] +packages = [ + { include = "auto_detect_exceptions", from = "src" } +] + +[project.scripts] +detect1 = "auto_detect_exceptions.cli:main" + +[tool.poetry.dependencies] +python = "^3.10" +libcst = "^1.6.0" +black = "^25.1.0" + + +[tool.poetry.group.dev.dependencies] +pytest = "^8.2.2" +pre-commit = "^3.7.1" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/samples/main.py b/samples/main.py new file mode 100644 index 0000000..392d7fd --- /dev/null +++ b/samples/main.py @@ -0,0 +1,8 @@ +def ze(): + """ + Raises: + Exception: Description of when this error is raised. + ValueError: Description of when this error is raised. + """ + raise Exception("ze") + raise ValueError("ze") diff --git a/src/auto_detect_exceptions/__init__.py b/src/auto_detect_exceptions/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/auto_detect_exceptions/ast_utils.py b/src/auto_detect_exceptions/ast_utils.py new file mode 100644 index 0000000..6641d98 --- /dev/null +++ b/src/auto_detect_exceptions/ast_utils.py @@ -0,0 +1,61 @@ +import ast +from typing import Dict, Optional + + +def parse_python_code(source_code: str) -> ast.Module: + """ + Parses Python source code into an Abstract Syntax Tree (AST). 
+ + Args: + source_code (str): The Python source code as a string. + + Returns: + ast.Module: The root AST node of the parsed code. + """ + return ast.parse(source_code) + + +def extract_functions(tree: ast.Module) -> Dict[str, ast.FunctionDef]: + """ + Extracts all function definitions from an AST tree. + + Args: + tree (ast.Module): The AST representation of the code. + + Returns: + Dict[str, ast.FunctionDef]: A dictionary mapping function names to their AST nodes. + """ + functions = {} + for node in ast.walk(tree): + if isinstance(node, ast.FunctionDef): # Include only regular functions + functions[node.name] = node + return functions + + +def get_docstring(node: ast.FunctionDef) -> Optional[str]: + """ + Retrieves the docstring from a function node. + + Args: + node (ast.FunctionDef): The AST node representing a function. + + Returns: + Optional[str]: The function's docstring if present, otherwise None. + """ + return ast.get_docstring(node) + + +def has_exceptions_section(docstring: Optional[str]) -> bool: + """ + Checks if a docstring contains an 'Exceptions' or 'Raises' section. + + Args: + docstring (Optional[str]): The docstring text. + + Returns: + bool: True if the docstring contains an exceptions section, False otherwise. + """ + if docstring is None: + return False + docstring_lower = docstring.lower() + return "raises:" in docstring_lower or "exceptions:" in docstring_lower diff --git a/src/auto_detect_exceptions/bk/exception_analyzer copy.py b/src/auto_detect_exceptions/bk/exception_analyzer copy.py new file mode 100644 index 0000000..1870cf5 --- /dev/null +++ b/src/auto_detect_exceptions/bk/exception_analyzer copy.py @@ -0,0 +1,343 @@ +import ast +import inspect +import importlib +import sys +from typing import Set, Tuple, Dict, Union, List, Any +from pathlib import Path + + +def get_module_contents(module_name: str) -> Dict[str, Any]: + """ + Get all functions and classes from a module. + + Args: + module_name (str): The name of the module to inspect. + + Returns: + Dict[str, Any]: A dictionary of object names and their corresponding objects. + """ + module = importlib.import_module(module_name) + return {name: obj for name, obj in inspect.getmembers(module)} + + +def get_ast_from_object(obj: Any) -> ast.AST: + """ + Get the AST representation of an object (function or class). + + Args: + obj (Any): The object to analyze. + + Returns: + ast.AST: The AST representation of the object. + """ + try: + source = inspect.getsource(obj) + return ast.parse(source).body[0] + except (OSError, TypeError): + return None + + +def analyze_node( + node: ast.AST, global_scope: Dict[str, Dict[str, Any]], module_name: str +) -> Tuple[Set[str], Dict[str, Union[str, List[str]]]]: + """ + Analyze a node (function or class) for called functions and potentially raised exceptions with messages, + including exceptions from sub-function calls across different modules. + + Args: + node (ast.AST): The AST node to analyze. + global_scope (Dict[str, Dict[str, Any]]): A dictionary of modules and their contents. + module_name (str): The name of the module containing the current node. + + Returns: + Tuple[Set[str], Dict[str, Union[str, List[str]]]]: A tuple containing: + 1. Set of names of functions called within the analyzed node. + 2. Dictionary of exceptions that may be raised by the node, + where keys are exception types and values are either a string message + or a list of possible messages. 
+ """ + function_calls: Set[str] = set() + potentially_raised_exceptions: Dict[str, Union[str, List[str]]] = {} + handled_exceptions: Set[str] = set() + reraised_exceptions: Set[str] = set() + + class ExceptionAnalyzer(ast.NodeVisitor): + def visit_Call(self, node: ast.Call) -> None: + if isinstance(node.func, ast.Name): + function_name = node.func.id + function_calls.add(function_name) + self.analyze_call(function_name) + elif isinstance(node.func, ast.Attribute): + if isinstance(node.func.value, ast.Name): + module_name = node.func.value.id + function_name = node.func.attr + function_calls.add(f"{module_name}.{function_name}") + self.analyze_call(function_name, module_name) + self.generic_visit(node) + + def analyze_call(self, function_name: str, module_name: str = None) -> None: + for mod_name, mod_contents in global_scope.items(): + if module_name and mod_name != module_name: + continue + if function_name in mod_contents: + sub_obj = mod_contents[function_name] + sub_node = get_ast_from_object(sub_obj) + if sub_node: + _, sub_exceptions = analyze_node( + sub_node, global_scope, mod_name + ) + self.update_exceptions(sub_exceptions) + break + + def update_exceptions( + self, sub_exceptions: Dict[str, Union[str, List[str]]] + ) -> None: + for exc, msg in sub_exceptions.items(): + if exc in potentially_raised_exceptions: + if isinstance(potentially_raised_exceptions[exc], str): + potentially_raised_exceptions[exc] = [ + potentially_raised_exceptions[exc], + msg, + ] + elif isinstance(potentially_raised_exceptions[exc], list): + if isinstance(msg, list): + potentially_raised_exceptions[exc].extend(msg) + else: + potentially_raised_exceptions[exc].append(msg) + else: + potentially_raised_exceptions[exc] = msg + + def visit_Raise(self, node: ast.Raise) -> None: + if node.exc is None: + if self.current_except_clause: + reraised_exceptions.update(self.current_except_clause) + else: + potentially_raised_exceptions["Unknown"] = "Re-raised exception" + else: + exc_name, exc_msg = self.get_exception_info(node.exc) + + if exc_name in potentially_raised_exceptions: + if isinstance(potentially_raised_exceptions[exc_name], str): + potentially_raised_exceptions[exc_name] = [ + potentially_raised_exceptions[exc_name], + exc_msg, + ] + elif isinstance(potentially_raised_exceptions[exc_name], list): + potentially_raised_exceptions[exc_name].append(exc_msg) + else: + potentially_raised_exceptions[exc_name] = exc_msg + + self.generic_visit(node) + + def get_exception_info(self, node: ast.expr) -> Tuple[str, str]: + if isinstance(node, ast.Call): + if isinstance(node.func, ast.Name): + exc_name = node.func.id + elif isinstance(node.func, ast.Attribute): + exc_name = node.func.attr + else: + exc_name = "Unknown" + + if node.args: + if isinstance(node.args[0], ast.Str): + exc_msg = node.args[0].s + elif isinstance(node.args[0], ast.Constant) and isinstance( + node.args[0].value, str + ): + exc_msg = node.args[0].value + else: + exc_msg = "Dynamic message" + else: + exc_msg = "No message" + elif isinstance(node, ast.Name): + exc_name = node.id + exc_msg = "No message" + else: + exc_name = "Unknown" + exc_msg = "Unknown message" + + return exc_name, exc_msg + + def visit_Assert(self, node: ast.Assert) -> None: + potentially_raised_exceptions["AssertionError"] = "Assertion failed" + self.generic_visit(node) + + def visit_Try(self, node: ast.Try) -> None: + outer_exceptions = ( + set(self.exception_stack[-1]) if self.exception_stack else set() + ) + + for handler in node.handlers: + if handler.type is None: # bare 
except: + handled_exceptions.update(potentially_raised_exceptions.keys()) + self.exception_stack.append( + set(potentially_raised_exceptions.keys()) + ) + self.current_except_clause = set( + potentially_raised_exceptions.keys() + ) + elif isinstance(handler.type, ast.Name): + handled_exceptions.add(handler.type.id) + self.exception_stack.append({handler.type.id}) + self.current_except_clause = {handler.type.id} + elif isinstance(handler.type, ast.Tuple): + handled_types = { + elt.id for elt in handler.type.elts if isinstance(elt, ast.Name) + } + handled_exceptions.update(handled_types) + self.exception_stack.append(handled_types) + self.current_except_clause = handled_types + + self.visit(handler) + self.exception_stack.pop() + self.current_except_clause = set() + + self.exception_stack.append(outer_exceptions) + for item in node.body: + self.visit(item) + self.exception_stack.pop() + + if node.orelse: + self.exception_stack.append(outer_exceptions) + for item in node.orelse: + self.visit(item) + self.exception_stack.pop() + + if node.finalbody: + self.exception_stack.append(set()) + for item in node.finalbody: + self.visit(item) + self.exception_stack.pop() + + analyzer = ExceptionAnalyzer() + analyzer.visit(node) + + actually_raised_exceptions = { + exc: msg + for exc, msg in potentially_raised_exceptions.items() + if exc not in handled_exceptions or exc in reraised_exceptions + } + + return function_calls, actually_raised_exceptions + + +def analyze_file( + file_path: str, analyzed_files: Set[str] = None +) -> Dict[str, Tuple[Set[str], Dict[str, Union[str, List[str]]]]]: + """ + Analyze all functions and classes in a Python file and its imported modules. + + Args: + file_path (str): Path to the Python file to analyze. + analyzed_files (Set[str], optional): Set of already analyzed file paths to avoid circular imports. + + Returns: + Dict[str, Tuple[Set[str], Dict[str, Union[str, List[str]]]]]: A dictionary where + keys are function/method names and values are tuples containing: + 1. Set of names of functions called within the analyzed function/method. + 2. Dictionary of exceptions that may be raised by the function/method. 
+ """ + if analyzed_files is None: + analyzed_files = set() + + if file_path in analyzed_files: + return {} + + analyzed_files.add(file_path) + + with open(file_path, "r") as file: + content = file.read() + + tree = ast.parse(content) + + # Get the module name from the file path + module_name = Path(file_path).stem + + # Add the directory containing the file to sys.path to allow imports + sys.path.insert(0, str(Path(file_path).parent)) + + # Create a dictionary of global contents for the current module + global_scope = {module_name: {}} + + # Analyze imported modules + for node in ast.walk(tree): + if isinstance(node, ast.Import): + for alias in node.names: + imported_module = alias.name + try: + global_scope[imported_module] = get_module_contents(imported_module) + except ImportError: + print(f"Warning: Could not import module {imported_module}") + elif isinstance(node, ast.ImportFrom): + if node.level == 0: # absolute import + module = node.module + else: # relative import + module = f"{'.'.join([''] * (node.level - 1))}{node.module}" + try: + imported_contents = get_module_contents(module) + for alias in node.names: + if alias.name == "*": + global_scope[module] = imported_contents + else: + if module not in global_scope: + global_scope[module] = {} + global_scope[module][alias.name] = imported_contents.get( + alias.name + ) + except ImportError: + print(f"Warning: Could not import from module {module}") + + results = {} + for node in ast.walk(tree): + if isinstance(node, (ast.FunctionDef, ast.ClassDef)): + if isinstance(node, ast.FunctionDef): + object_name = node.name + global_scope[module_name][object_name] = compile( + ast.Module(body=[node], type_ignores=[]), + filename="", + mode="exec", + ) + function_calls, exceptions = analyze_node( + node, global_scope, module_name + ) + results[f"{module_name}.{object_name}"] = (function_calls, exceptions) + elif isinstance(node, ast.ClassDef): + class_name = node.name + for sub_node in node.body: + if isinstance(sub_node, ast.FunctionDef): + method_name = sub_node.name + full_name = f"{module_name}.{class_name}.{method_name}" + global_scope[module_name][full_name] = compile( + ast.Module(body=[sub_node], type_ignores=[]), + filename="", + mode="exec", + ) + function_calls, exceptions = analyze_node( + sub_node, global_scope, module_name + ) + results[full_name] = (function_calls, exceptions) + + # Remove the added path + sys.path.pop(0) + + return results + + +# Example usage +if __name__ == "__main__": + file_path = "example.py" # Replace with the path to your Python file + analysis_results = analyze_file(file_path) + + for func_name, (calls, exceptions) in analysis_results.items(): + print(f"\nAnalysis of {func_name}:") + print("Functions called:") + for call in calls: + print(f"- {call}") + print("Exceptions that may be raised:") + for exc, msg in exceptions.items(): + if isinstance(msg, list): + print(f"- {exc}: Multiple messages:") + for m in msg: + print(f" - {m}") + else: + print(f"- {exc}: {msg}") diff --git a/src/auto_detect_exceptions/bk/exception_analyzer.py b/src/auto_detect_exceptions/bk/exception_analyzer.py new file mode 100644 index 0000000..aa37683 --- /dev/null +++ b/src/auto_detect_exceptions/bk/exception_analyzer.py @@ -0,0 +1,62 @@ +from typing import Any +import ast + + +class ExceptionVisitor(ast.NodeVisitor): + def __init__(self) -> None: + self.potentially_raised_exceptions: set[str] = set() + self.handled_exceptions: set[str] = ( + set() + ) # exceptions caught in the current function + + def visit_Assert(self, 
node: ast.Assert) -> Any: + self.potentially_raised_exceptions.add("AssertionError") + return super().generic_visit(node) + + def visit_Raise(self, node: ast.Raise) -> None: + if node.exc is None: + # re-raise exception + self.potentially_raised_exceptions.update(self.handled_exceptions) + else: + exc_name, exc_msg = self.get_exception_info(node.exc) + + if exc_name not in self.potentially_raised_exceptions: + self.potentially_raised_exceptions.add(exc_name) + + self.generic_visit(node) + + def get_exception_info(self, node: ast.expr) -> tuple[str, str]: + if isinstance(node, ast.Call): + if isinstance(node.func, ast.Name): + exc_name = node.func.id + elif isinstance(node.func, ast.Attribute): + exc_name = node.func.attr + else: + exc_name = "Unknown" + + if node.args: + if isinstance(node.args[0], ast.Str): + exc_msg = node.args[0].s + elif isinstance(node.args[0], ast.Constant) and isinstance( + node.args[0].value, str + ): + exc_msg = node.args[0].value + else: + exc_msg = "Dynamic message" + else: + exc_msg = "No message" + elif isinstance(node, ast.Name): + exc_name = node.id + exc_msg = "No message" + else: + exc_name = "Unknown" + exc_msg = "Unknown message" + + return exc_name, exc_msg + + def visit_Try(self, node: ast.Try) -> Any: + # for handler in node.handlers: + # if handler.type is None: # bare exception: + + # self.handled_exceptions.add(handler.type) + # NodeVisitor has no visit_Try to delegate to, so continue with generic_visit + return super().generic_visit(node) diff --git a/src/auto_detect_exceptions/cli.py b/src/auto_detect_exceptions/cli.py new file mode 100644 index 0000000..cce9c3c --- /dev/null +++ b/src/auto_detect_exceptions/cli.py @@ -0,0 +1,94 @@ +import argparse +from .file_utils import find_python_files, read_python_file, write_python_file +from .ast_utils import ( + parse_python_code, + extract_functions, + get_docstring, + has_exceptions_section, +) +from .exception_analysis import get_unhandled_exceptions +from .docstring_utils import update_function_docstrings + + +def process_directory(directory: str, modify: bool) -> None: + """ + Process a directory, analyzing Python files and optionally modifying them. + + Args: + directory (str): The directory to process. + modify (bool): If True, modifies files; otherwise, generates a report. + """ + python_files = find_python_files(directory) + missing_exceptions = {} + + for file_path in python_files: + source_code = read_python_file(file_path) + tree = parse_python_code(source_code) + functions = extract_functions(tree) + function_exceptions = {} + + for func_name, func_node in functions.items(): + docstring = get_docstring(func_node) + + if not has_exceptions_section(docstring): + exceptions = get_unhandled_exceptions(func_node, functions) + if exceptions: + function_exceptions[func_name] = exceptions + + if function_exceptions: + missing_exceptions[file_path] = function_exceptions + + if modify: + updated_code = update_function_docstrings( + source_code, function_exceptions + ) + write_python_file(file_path, updated_code) + + if not modify: + generate_report(missing_exceptions) + + +def generate_report(missing_exceptions: dict) -> None: + """ + Prints a report of functions missing exception documentation. + + Args: + missing_exceptions (dict): A dictionary mapping file paths to missing exception sections.
+ """ + print("\n=== Report: Missing Exception Docstrings ===\n") + + if not missing_exceptions: + print("āœ… All functions have proper exception documentation!") + return + + for file_path, functions in missing_exceptions.items(): + print(f"\nšŸ“‚ File: {file_path}") + for func_name, exceptions in functions.items(): + print(f" šŸ”¹ Function `{func_name}()` is missing exception documentation.") + print(f" Expected exceptions: {', '.join(exceptions)}") + + +def main(): + """ + Entry point for the CLI tool. + """ + parser = argparse.ArgumentParser( + description="Analyze Python files to ensure functions have proper exception documentation." + ) + + parser.add_argument( + "directory", type=str, help="Directory to scan for Python files" + ) + parser.add_argument( + "--update", + action="store_true", + help="Modify files to add missing exception docstrings", + ) + + args = parser.parse_args() + + process_directory(args.directory, modify=args.update) + + +if __name__ == "__main__": + main() diff --git a/src/auto_detect_exceptions/detect.py b/src/auto_detect_exceptions/detect.py new file mode 100644 index 0000000..f3dedde --- /dev/null +++ b/src/auto_detect_exceptions/detect.py @@ -0,0 +1,44 @@ +import importlib.util +import ast +from .exception_analyzer import ExceptionVisitor + + +def get_file_path_from_full_path(full_path: str) -> str: + module_path, _ = full_path.rsplit(".", 1) # Split off the function name + + # Convert module path to a probable file path + file_path = module_path.replace(".", "/") + ".py" + + # Check if the module is actually installed and find the exact path + try: + # Try to find the module without importing it + spec = importlib.util.find_spec(module_path) + if spec and spec.origin: + file_path = spec.origin # Update with the exact path from the spec + except ImportError: + # If the module isn't found, handle or raise error + print(f"Module {module_path} not found in system.") + raise ModuleNotFoundError() + + return file_path + + +def analyze_function(node) -> set[str]: + analyzer = ExceptionVisitor() + analyzer.visit(node) + + +def detect_function_exceptions(func: str) -> set[str]: + file_path = get_file_path_from_full_path(func) + + with open(file_path) as ifile: + content = ifile.read() + + tree = ast.parse(content) + + for node in ast.walk(tree): + if isinstance(node, ast.FunctionDef): + if node.name == func.split(".")[-1]: + return analyze_function(node) + + raise ModuleNotFoundError() diff --git a/src/auto_detect_exceptions/docstring_utils.py b/src/auto_detect_exceptions/docstring_utils.py new file mode 100644 index 0000000..e3daf49 --- /dev/null +++ b/src/auto_detect_exceptions/docstring_utils.py @@ -0,0 +1,75 @@ +import libcst as cst +from typing import Dict, Set + + +class DocstringUpdater(cst.CSTTransformer): + """ + Transformer that updates function docstrings to include missing exceptions. 
+ """ + + def __init__(self, function_exceptions: Dict[str, Set[str]]): + self.function_exceptions = function_exceptions + + def leave_FunctionDef( + self, original_node: cst.FunctionDef, updated_node: cst.FunctionDef + ) -> cst.FunctionDef: + func_name = original_node.name.value + if func_name not in self.function_exceptions: + return updated_node # No changes needed + + # Extract the existing docstring + existing_docstring = None + if isinstance(original_node.body.body[0], cst.SimpleStatementLine): + first_stmt = original_node.body.body[0].body[0] + if isinstance(first_stmt, cst.Expr) and isinstance( + first_stmt.value, cst.SimpleString + ): + existing_docstring = first_stmt.value.value.strip( + "\"'" + ) # Strip triple quotes + + # Generate new exceptions section + exception_lines = ["Raises:"] + for exc in sorted(self.function_exceptions[func_name]): + exception_lines.append( + f" {exc}: Description of when this error is raised." + ) + exception_text = "\n".join(exception_lines) + + # Construct new docstring + if existing_docstring: + new_docstring = f"{existing_docstring}\n\n{exception_text}" + else: + new_docstring = exception_text + + # Replace or insert the docstring + new_docstring_node = cst.SimpleStatementLine( + body=[cst.Expr(value=cst.SimpleString(f'"""{new_docstring}"""'))] + ) + + # Insert new docstring into the function body + new_body = ( + [new_docstring_node] + list(updated_node.body.body[1:]) + if existing_docstring + else [new_docstring_node] + list(updated_node.body.body) + ) + + return updated_node.with_changes(body=cst.IndentedBlock(body=new_body)) + + +def update_function_docstrings( + source_code: str, function_exceptions: Dict[str, Set[str]] +) -> str: + """ + Uses `libcst` to update function docstrings in a Python source file. + + Args: + source_code (str): The original source code. + function_exceptions (Dict[str, Set[str]]): A mapping of function names to their exceptions. + + Returns: + str: The modified source code. + """ + tree = cst.parse_module(source_code) + updated_tree = tree.visit(DocstringUpdater(function_exceptions)) + return updated_tree.code diff --git a/src/auto_detect_exceptions/exception_analysis.py b/src/auto_detect_exceptions/exception_analysis.py new file mode 100644 index 0000000..aa7fceb --- /dev/null +++ b/src/auto_detect_exceptions/exception_analysis.py @@ -0,0 +1,118 @@ +import ast +from typing import Dict, Set, Optional + + +def get_called_function_name(node: ast.Call) -> Optional[str]: + """ + Extracts the function name from an `ast.Call` node. + + Args: + node (ast.Call): The AST node representing a function call. + + Returns: + Optional[str]: The function name if available, otherwise None. + """ + if isinstance(node.func, ast.Name): + return node.func.id # Direct function call: foo() + elif isinstance(node.func, ast.Attribute): + return node.func.attr # Method call: obj.foo() + return None + + +MAX_RECURSION_DEPTH = 50 # Prevent excessive recursion + + +def get_unhandled_exceptions( + node: ast.FunctionDef, + user_defined_funcs: Dict[str, ast.FunctionDef], + visited: Optional[Set[str]] = None, + depth: int = 0, +) -> Set[str]: + """ + Identifies all unhandled exceptions that a function may raise. + + Args: + node (ast.FunctionDef): The AST node of the function. + user_defined_funcs (Dict[str, ast.FunctionDef]): A mapping of function names to their AST nodes. + visited (Optional[Set[str]]): A set of already visited functions to prevent infinite loops. + depth (int): Current recursion depth. 
+ + Returns: + Set[str]: A set of exception class names that may be raised. + """ + if visited is None: + visited = set() + + if depth > MAX_RECURSION_DEPTH: + print( + f"WARNING: Maximum recursion depth reached in `{node.name}`, stopping further analysis." + ) + return set() + + exceptions = set() + handled_exceptions = set() + + for child in ast.walk(node): + # Detect explicit `raise` statements + if isinstance(child, ast.Raise) and child.exc: + if isinstance(child.exc, ast.Call) and isinstance(child.exc.func, ast.Name): + exceptions.add(child.exc.func.id) + elif isinstance(child.exc, ast.Name): + exceptions.add(child.exc.id) + + # Detect try/except blocks and track caught exceptions + elif isinstance(child, ast.Try): + for handler in child.handlers: + if handler.type and isinstance(handler.type, ast.Name): + handled_exceptions.add(handler.type.id) + + # Detect function calls + elif isinstance(child, ast.Call): + func_name = get_called_function_name(child) + if func_name and func_name in user_defined_funcs: + # Pass `visited` set to avoid infinite recursion + exceptions |= resolve_exceptions_recursively( + func_name, user_defined_funcs, visited, depth + 1 + ) + + # Remove handled exceptions from the detected set + return exceptions - handled_exceptions + + +def resolve_exceptions_recursively( + func_name: str, + user_funcs: Dict[str, ast.FunctionDef], + visited: Optional[Set[str]] = None, + depth: int = 0, +) -> Set[str]: + """ + Recursively gathers exceptions from user-defined function calls. + + Args: + func_name (str): The function name to analyze. + user_funcs (Dict[str, ast.FunctionDef]): A dictionary mapping function names to their AST nodes. + visited (Set[str]): A set to track visited functions and prevent infinite recursion. + depth (int): Current recursion depth. + + Returns: + Set[str]: A set of exception names that may propagate from the function. + """ + if visited is None: + visited = set() + + if depth > MAX_RECURSION_DEPTH: + print( + f"WARNING: Maximum recursion depth reached in `{func_name}`, stopping further analysis." + ) + return set() # Prevent deep recursion + + if func_name in visited: + return set() # Prevent infinite recursion loops + + if func_name not in user_funcs: + return set() # Ignore functions not defined in this module + + visited.add(func_name) + func_node = user_funcs[func_name] + + return get_unhandled_exceptions(func_node, user_funcs, visited, depth) diff --git a/src/auto_detect_exceptions/file_utils.py b/src/auto_detect_exceptions/file_utils.py new file mode 100644 index 0000000..c4cbae4 --- /dev/null +++ b/src/auto_detect_exceptions/file_utils.py @@ -0,0 +1,46 @@ +from pathlib import Path +from typing import List + + +def find_python_files(directory: str) -> List[Path]: + """ + Recursively finds all Python (.py) files in the given directory. + + Args: + directory (str): The directory to search in. + + Returns: + List[Path]: A list of Path objects for Python files. + """ + return [p for p in Path(directory).rglob("*.py") if p.is_file()] + + +def read_python_file(filepath: Path) -> str: + """ + Reads the content of a Python file. + + Args: + filepath (Path): The path to the Python file. + + Returns: + str: The content of the file as a string. + """ + try: + return filepath.read_text(encoding="utf-8") + except Exception as e: + print(f"Error reading {filepath}: {e}") + return "" + + +def write_python_file(filepath: Path, updated_code: str) -> None: + """ + Writes updated content back to a Python file. 
+ + Args: + filepath (Path): The path to the Python file. + updated_code (str): The modified source code to write. + """ + try: + filepath.write_text(updated_code, encoding="utf-8") + except Exception as e: + print(f"Error writing to {filepath}: {e}") diff --git a/src/auto_detect_exceptions/main.py b/src/auto_detect_exceptions/main.py new file mode 100644 index 0000000..ce62048 --- /dev/null +++ b/src/auto_detect_exceptions/main.py @@ -0,0 +1,88 @@ +import ast + +from .exception_analyzer import ExceptionVisitor + + +def analyze_function( + func_node: ast.FunctionDef, +) -> tuple[set[str], dict[str, str | list[str]]]: + """ + Analyze a function node for called functions and potentially raised exceptions with messages. + + Args: + func_node (ast.FunctionDef): The function node to analyze. + + Returns: + Tuple[Set[str], Dict[str, Union[str, List[str]]]]: A tuple containing: + 1. Set of names of functions called within the analyzed function. + 2. Dictionary of exceptions that may be raised by the function, + where keys are exception types and values are either a string message + or a list of possible messages. + """ + + analyzer = ExceptionVisitor() + analyzer.visit(func_node) + + # Calculate exceptions that may actually be raised + actually_raised_exceptions = { + exc: msg + for exc, msg in analyzer.potentially_raised_exceptions.items() + if exc not in analyzer.handled_exceptions or exc in analyzer.reraised_exceptions + } + + return analyzer.function_calls, actually_raised_exceptions + + +def analyze_file( + file_path: str, +) -> dict[str, tuple[set[str], dict[str, str | list[str]]]]: + """ + Analyze all functions in a Python file. + + Args: + file_path (str): Path to the Python file to analyze. + + Returns: + Dict[str, Tuple[Set[str], Dict[str, Union[str, List[str]]]]]: A dictionary where + keys are function names and values are tuples containing: + 1. Set of names of functions called within the analyzed function. + 2. Dictionary of exceptions that may be raised by the function. 
+ """ + with open(file_path) as file: + content = file.read() + + tree = ast.parse(content) + + results = {} + for node in ast.walk(tree): + if isinstance(node, ast.FunctionDef): + function_name = node.name + function_calls, exceptions = analyze_function(node) + results[function_name] = (function_calls, exceptions) + + return results + + +def analysis_exception(file_path: str): + analysis_results = analyze_file(file_path) + + for func_name, (calls, exceptions) in analysis_results.items(): + print(f"\nAnalysis of {func_name}:") + print("Functions called:") + for call in calls: + print(f"- {call}") + print("Exceptions that may be raised:") + for exc, msg in exceptions.items(): + if isinstance(msg, list): + print(f"- {exc}: Multiple messages:") + for m in msg: + print(f" - {m}") + else: + print(f"- {exc}: {msg}") + + +# Example usage +import typer + +if __name__ == "__main__": + typer.run(analysis_exception) diff --git a/src/auto_detect_exceptions/tests/__init__.py b/src/auto_detect_exceptions/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/auto_detect_exceptions/tests/test_ast_utils.py b/src/auto_detect_exceptions/tests/test_ast_utils.py new file mode 100644 index 0000000..0d91680 --- /dev/null +++ b/src/auto_detect_exceptions/tests/test_ast_utils.py @@ -0,0 +1,64 @@ +import unittest +import ast +from ..ast_utils import ( + parse_python_code, + extract_functions, + get_docstring, + has_exceptions_section, +) + + +class TestASTUtils(unittest.TestCase): + def setUp(self): + """Set up test source code.""" + self.source_code = ''' +def foo(): + """This is a sample function. + + Raises: + ValueError: If something goes wrong. + """ + raise ValueError("Error") + +def bar(): + """This function has no exceptions section.""" + pass + +def baz(): + pass + ''' + self.tree = parse_python_code(self.source_code) + self.functions = extract_functions(self.tree) + + def test_parse_python_code(self): + """Test that parsing creates an AST module.""" + self.assertIsInstance(self.tree, ast.Module) + + def test_extract_functions(self): + """Test extracting function definitions.""" + self.assertIn("foo", self.functions) + self.assertIn("bar", self.functions) + self.assertIn("baz", self.functions) + self.assertEqual(len(self.functions), 3) + + def test_get_docstring(self): + """Test retrieving function docstrings.""" + self.assertEqual( + get_docstring(self.functions["foo"]).strip(), + "This is a sample function.\n\n Raises:\n ValueError: If something goes wrong.", + ) + self.assertEqual( + get_docstring(self.functions["bar"]).strip(), + "This function has no exceptions section.", + ) + self.assertIsNone(get_docstring(self.functions["baz"])) + + def test_has_exceptions_section(self): + """Test checking if a docstring contains an 'Exceptions' or 'Raises' section.""" + self.assertTrue(has_exceptions_section(get_docstring(self.functions["foo"]))) + self.assertFalse(has_exceptions_section(get_docstring(self.functions["bar"]))) + self.assertFalse(has_exceptions_section(get_docstring(self.functions["baz"]))) + + +if __name__ == "__main__": + unittest.main() diff --git a/src/auto_detect_exceptions/tests/test_cli.py b/src/auto_detect_exceptions/tests/test_cli.py new file mode 100644 index 0000000..b390141 --- /dev/null +++ b/src/auto_detect_exceptions/tests/test_cli.py @@ -0,0 +1,34 @@ +import unittest +import tempfile +from pathlib import Path +from ..cli import process_directory + + +class TestCLI(unittest.TestCase): + def setUp(self): + """Create a temporary directory and Python files for 
testing.""" + self.temp_dir = tempfile.TemporaryDirectory() + self.test_file = Path(self.temp_dir.name) / "test_script.py" + self.test_file.write_text(""" +def foo(): + raise ValueError("An error occurred") +""") + + def tearDown(self): + """Cleanup the temporary directory.""" + self.temp_dir.cleanup() + + def test_process_directory_report(self): + """Test processing a directory without modifying files.""" + process_directory(self.temp_dir.name, modify=False) + + def test_process_directory_update(self): + """Test modifying files to add exception docstrings.""" + process_directory(self.temp_dir.name, modify=True) + content = self.test_file.read_text() + self.assertIn("Raises:", content) + self.assertIn("ValueError", content) + + +if __name__ == "__main__": + unittest.main() diff --git a/src/auto_detect_exceptions/tests/test_detect_exceptions.py b/src/auto_detect_exceptions/tests/test_detect_exceptions.py new file mode 100644 index 0000000..e98992e --- /dev/null +++ b/src/auto_detect_exceptions/tests/test_detect_exceptions.py @@ -0,0 +1,177 @@ +import pytest +from typing import Any +from ..detect import detect_function_exceptions +from typing import Callable + + +def func_value_error(x: Any) -> Any: + raise ValueError("An error occurred") + + +def func_type_error(x: Any) -> Any: + raise TypeError("Another error occurred") + + +def func_no_error() -> Any: + return "No error" + + +def func_multiple_errors(x: Any) -> Any: + if x == 0: + raise ValueError("x cannot be zero") + elif x == 1: + raise TypeError("x must be an integer") + else: + raise RuntimeError("General error") + + +def func_nested_errors(x: Any) -> Any: + try: + if x == 0: + raise ValueError("x cannot be zero") + elif x == 1: + raise TypeError("x must be an integer") + except ValueError: + raise KeyError("Caught a ValueError") + else: + if x == 2: + raise IndexError("x cannot be two") + finally: + print("Finally block executed") + + +# Test case 6: Function with nested try-except +def func_nested_try_except(x: Any) -> Any: + try: + try: + if x == 0: + raise ValueError("x cannot be zero") + elif x == 1: + raise TypeError("x must be an integer") + except ValueError: + raise KeyError("Caught a ValueError") + except KeyError: + raise IndexError("Caught a KeyError") + + +# Test case 7: Function with no arguments but raises exceptions +def func_with_no_arguments() -> Any: + raise AttributeError("Attribute error occurred") + + +# Test case 8: Function with try, except, finally, else +def func_with_try_except_finally_else(x: Any) -> Any: + try: + if x == 0: + raise ValueError("x cannot be zero") + elif x == 1: + raise TypeError("x must be an integer") + except ValueError: + raise KeyError("Caught a ValueError") + else: + if x == 2: + raise IndexError("x cannot be two") + finally: + print("Finally block executed") + + +# Test case 9: Function with nested function calls that raise exceptions +def func_with_nested_function_calls(x: Any) -> Any: + def nested_func(y: Any) -> Any: + if y == 0: + raise ValueError("y cannot be zero") + elif y == 1: + raise TypeError("y must be an integer") + else: + raise RuntimeError("General error in nested function") + + nested_func(x) + + +# Test case 10: Function with assert statement that raises AssertionError +def func_with_assert_statement(x: Any) -> Any: + assert x > 0, "x must be greater than zero" + + +# Test case 11: Function with recursive calls that raise exceptions +def func_recursive(x: Any) -> Any: + if x == 0: + raise ValueError("x cannot be zero") + else: + func_recursive(x - 1) + + +# Test 
case 12: except Parent Exception +def func_except_parent_exception(x: Any) -> Any: + try: + if x == 0: + raise ValueError("x cannot be zero") + elif x == 1: + raise TypeError("x must be an integer") + except Exception: + raise KeyError("Caught a ValueError") + + +# Test case 13: except parent exception with customize Exception +def func_except_parent_exception_customize(x: Any) -> Any: + class SubValueError(ValueError): ... + + try: + if x == 0: + raise SubValueError("x cannot be zero") + elif x == 1: + raise TypeError("x must be an integer") + except ValueError as e: + raise KeyError(f"Caught a ValueError {e}") + + +# Test case 14: except Bare +def func_except_bare(x: Any) -> Any: + try: + if x == 0: + raise ValueError("x cannot be zero") + elif x == 1: + raise TypeError("x must be an integer") + except: # noqa: E722 + raise KeyError("Caught a ValueError") + + +# Test case 15: multiple except +def func_multiple_except_blocks() -> None: + try: + print("In try block") + raise ValueError("Error in try block") # Raises an exception + except ValueError: + print("In ValueError except block") + raise TypeError("Error in ValueError except block") # Raises a new exception + except TypeError: + print("In TypeError except block") + except Exception: + print("In generic Exception except block") + finally: + print("In finally block") + + +test_cases = [ + (func_value_error, {"ValueError"}), + (func_type_error, {"TypeError"}), + (func_no_error, set()), + (func_multiple_errors, {"ValueError", "TypeError", "RuntimeError"}), + (func_nested_errors, {"KeyError", "TypeError", "IndexError"}), + (func_nested_try_except, {"KeyError", "IndexError"}), + (func_with_no_arguments, {"AttributeError"}), + (func_with_try_except_finally_else, {"KeyError", "TypeError", "IndexError"}), + (func_with_nested_function_calls, {"ValueError", "TypeError", "RuntimeError"}), + (func_with_assert_statement, {"AssertionError"}), + (func_recursive, {"ValueError"}), + (func_except_parent_exception, {"KeyError"}), + (func_except_parent_exception_customize, {"KeyError"}), + (func_except_bare, {"KeyError"}), + (func_multiple_except_blocks, {"ValueError", "TypeError"}), +] + + +@pytest.mark.parametrize("func,expected", test_cases) +def test_func(func: Callable[..., Any], expected: set[str]) -> None: + result = detect_function_exceptions(f"{func.__module__}.{func.__name__}") + assert result == expected, f"Expected {expected}, got {result}" diff --git a/src/auto_detect_exceptions/tests/test_docstring_utils.py b/src/auto_detect_exceptions/tests/test_docstring_utils.py new file mode 100644 index 0000000..b3318b9 --- /dev/null +++ b/src/auto_detect_exceptions/tests/test_docstring_utils.py @@ -0,0 +1,22 @@ +import unittest +from ..docstring_utils import update_function_docstrings + + +class TestDocstringUtils(unittest.TestCase): + def test_update_function_docstrings(self): + """Test modifying a function's docstring to include missing exceptions.""" + source_code = ''' +def foo(): + """This function does something.""" + raise ValueError("Error occurred") +''' + + function_exceptions = {"foo": {"ValueError"}} + updated_code = update_function_docstrings(source_code, function_exceptions) + + self.assertIn("Raises:", updated_code) + self.assertIn("ValueError", updated_code) + + +if __name__ == "__main__": + unittest.main() diff --git a/src/auto_detect_exceptions/tests/test_exception_analysis.py b/src/auto_detect_exceptions/tests/test_exception_analysis.py new file mode 100644 index 0000000..714284a --- /dev/null +++ 
b/src/auto_detect_exceptions/tests/test_exception_analysis.py @@ -0,0 +1,68 @@ +import unittest +import ast +from ..ast_utils import parse_python_code, extract_functions +from ..exception_analysis import ( + get_unhandled_exceptions, + get_called_function_name, + resolve_exceptions_recursively, +) + + +class TestExceptionAnalysis(unittest.TestCase): + def setUp(self): + """Set up test source code.""" + self.source_code = ''' +def foo(): + """This function raises a ValueError.""" + raise ValueError("An error occurred") + +def bar(): + """This function calls foo(), propagating its exception.""" + foo() + +def baz(): + """This function has a try/except block, handling exceptions.""" + try: + foo() + except ValueError: + pass + +def qux(): + """This function calls an external function, which we ignore.""" + print("Hello, World!") + ''' + self.tree = parse_python_code(self.source_code) + self.functions = extract_functions(self.tree) + + def test_get_unhandled_exceptions(self): + """Test detection of unhandled exceptions in function bodies.""" + self.assertEqual( + get_unhandled_exceptions(self.functions["foo"], self.functions), + {"ValueError"}, + ) + self.assertEqual( + get_unhandled_exceptions(self.functions["baz"], self.functions), set() + ) # Exception is handled + self.assertEqual( + get_unhandled_exceptions(self.functions["qux"], self.functions), set() + ) # No exception + + def test_get_called_function_name(self): + """Test extracting function names from function calls.""" + bar_node = self.functions["bar"] + for child in ast.walk(bar_node): + if isinstance(child, ast.Call): + self.assertEqual(get_called_function_name(child), "foo") + + def test_resolve_exceptions_recursively(self): + """Test recursive exception resolution across function calls.""" + self.assertEqual( + resolve_exceptions_recursively("bar", self.functions), {"ValueError"} + ) + self.assertEqual( + resolve_exceptions_recursively("baz", self.functions), set() + ) # Exception is handled + + +if __name__ == "__main__": + unittest.main() diff --git a/src/auto_detect_exceptions/tests/test_file_utils.py b/src/auto_detect_exceptions/tests/test_file_utils.py new file mode 100644 index 0000000..f8a0f9a --- /dev/null +++ b/src/auto_detect_exceptions/tests/test_file_utils.py @@ -0,0 +1,38 @@ +import unittest +from pathlib import Path +import tempfile +from ..file_utils import find_python_files, read_python_file, write_python_file + + +class TestFileUtils(unittest.TestCase): + def setUp(self): + """Set up a temporary directory with Python files for testing.""" + self.temp_dir = tempfile.TemporaryDirectory() + self.test_file = Path(self.temp_dir.name) / "test_script.py" + self.test_file.write_text("print('Hello, World!')") + + def tearDown(self): + """Clean up the temporary directory.""" + self.temp_dir.cleanup() + + def test_find_python_files(self): + """Test finding Python files in a directory.""" + py_files = find_python_files(self.temp_dir.name) + self.assertEqual(len(py_files), 1) + self.assertEqual(py_files[0].name, "test_script.py") + + def test_read_python_file(self): + """Test reading a Python file.""" + content = read_python_file(self.test_file) + self.assertEqual(content.strip(), "print('Hello, World!')") + + def test_write_python_file(self): + """Test writing to a Python file.""" + new_content = "print('Updated Content!')" + write_python_file(self.test_file, new_content) + content = read_python_file(self.test_file) + self.assertEqual(content.strip(), new_content) + + +if __name__ == "__main__": + unittest.main()
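
Taken together, the modules added in this diff compose into a single report-or-rewrite pass over a source tree. The sketch below is illustrative only; it is not the process_directory implementation exercised by test_cli.py. It assumes the installed package imports as auto_detect_exceptions, that the ast_utils helpers behave the way the tests above use them (parse_python_code returning an ast.Module, extract_functions returning a name-to-FunctionDef mapping), and that the annotate_directory name is hypothetical.

# Illustrative sketch only: wires together the helpers shown in this diff.
# Not the actual cli.process_directory; import path and annotate_directory are assumptions.
from auto_detect_exceptions.ast_utils import extract_functions, parse_python_code
from auto_detect_exceptions.docstring_utils import update_function_docstrings
from auto_detect_exceptions.exception_analysis import get_unhandled_exceptions
from auto_detect_exceptions.file_utils import (
    find_python_files,
    read_python_file,
    write_python_file,
)


def annotate_directory(directory: str, modify: bool = False) -> None:
    """Report, and optionally document, unhandled exceptions for each function."""
    for path in find_python_files(directory):
        source = read_python_file(path)
        if not source:
            continue  # read_python_file returns "" when the file cannot be read

        tree = parse_python_code(source)
        functions = extract_functions(tree)

        # Map each function to the exceptions that may escape it,
        # keeping only functions that have at least one unhandled exception.
        function_exceptions = {
            name: excs
            for name, node in functions.items()
            if (excs := get_unhandled_exceptions(node, functions))
        }

        for name, excs in function_exceptions.items():
            print(f"{path}:{name} may raise {', '.join(sorted(excs))}")

        if modify and function_exceptions:
            updated = update_function_docstrings(source, function_exceptions)
            write_python_file(path, updated)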