diff --git a/.gitattributes b/.gitattributes
index 9e866de1..6229091a 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -1,2 +1,2 @@
 *.js    text eol=lf
-*.json  text eol=lf
\ No newline at end of file
+*.json  text eol=lf
diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml
index 61935c3e..12319e56 100644
--- a/.github/workflows/validate.yml
+++ b/.github/workflows/validate.yml
@@ -10,8 +10,67 @@ env:
   FORCE_COLOR: 1
 
 jobs:
-  linuxNode18:
-    name: '[Linux] Node.js v18: Lint, Eventual Commitlint, Eventual Changelog, Formatting & Unit tests'
+  code_quality:
+    name: '[Linux] Node.js v18: Code quality and formatting checks'
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v2
+        with:
+          # For commitlint purposes, ensure the complete list of PR commits is available
+          # It's a loose and imperfect assumption that a PR has no more than 30 commits
+          fetch-depth: 30
+
+      - name: Retrieve last master commit (for `git diff` purposes)
+        run: |
+          git checkout -b pr
+          git fetch --prune --depth=30 origin +refs/heads/master:refs/remotes/origin/master
+          git checkout master
+          git checkout pr
+
+      - name: Retrieve dependencies from cache
+        id: cacheNpm
+        uses: actions/cache@v2
+        with:
+          path: |
+            ~/.npm
+            node_modules
+          key: npm-v18-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }}
+          restore-keys: |
+            npm-v18-${{ runner.os }}-${{ github.ref }}-
+            npm-v18-${{ runner.os }}-refs/heads/master-
+
+      - name: Install Node.js and npm
+        uses: actions/setup-node@v1
+        with:
+          node-version: 18.x
+
+      - name: Install dependencies
+        if: steps.cacheNpm.outputs.cache-hit != 'true'
+        run: |
+          npm update --no-save
+          npm update --save-dev --no-save
+
+      - name: Validate Prettier formatting
+        run: npm run prettier-check:updated
+
+      - name: Validate ESLint rules
+        run: npm run lint:updated
+
+      - name: Validate commit messages
+        if: github.event.pull_request.base.repo.id == github.event.pull_request.head.repo.id
+        run: npx commitlint -f master
+
+      - name: Validate changelog (if new version)
+        run: |
+          NEW_VERSION=`git diff -U0 master package.json | grep '"version": "' | tail -n 1 | grep -oE "[0-9]+\.[0-9]+\.[0-9]+"` || :
+          if [ -n "$NEW_VERSION" ];
+          then
+            npx dump-release-notes-from-cc-changelog $NEW_VERSION
+          fi
+
+  unit_tests:
+    name: '[Linux] Node.js v18: Unit tests'
     runs-on: ubuntu-latest
     strategy:
       matrix:
@@ -43,10 +102,10 @@ jobs:
             npm-v18-${{ runner.os }}-${{ github.ref }}-
             npm-v18-${{ runner.os }}-refs/heads/master-
 
-      - name: Set up Python 3.9
+      - name: Set up Python 3.10
         uses: actions/setup-python@v2
         with:
-          python-version: 3.9
+          python-version: '3.10'
 
       - name: Install Node.js and npm
         uses: actions/setup-node@v1
@@ -71,19 +130,6 @@ jobs:
         run: |
           npm update --no-save
           npm update --save-dev --no-save
-      - name: Validate Prettier formatting
-        run: npm run prettier-check:updated
-      - name: Validate ESLint rules
-        run: npm run lint:updated
-      - name: Validate commit messages
-        if: github.event.pull_request.base.repo.id == github.event.pull_request.head.repo.id
-        run: npx commitlint -f master
-      - name: Validate changelog (if new version)
-        run: |
-          NEW_VERSION=`git diff -U0 master package.json | grep '"version": "' | tail -n 1 | grep -oE "[0-9]+\.[0-9]+\.[0-9]+"` || :
-          if [ -n "$NEW_VERSION" ];
-          then
-            npx dump-release-notes-from-cc-changelog $NEW_VERSION
-          fi
+
       - name: Unit tests
         run: npm test
diff --git a/.gitignore b/.gitignore
index 64bdbd6a..194c199c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -42,6 +42,7 @@ admin.env
 #PYTHON STUFF
 *.py[co]
 __pycache__
+.venv
 
 #NODE STUFF
 package-lock.json
@@ -66,6 +67,7 @@ var/
 *.egg-info/
 .installed.cfg
 *.egg
+tests/base/.requirements-cache
 
 # Serverless directories
 .serverless
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 00000000..91e9aab5
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,19 @@
+repos:
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.5.0
+    hooks:
+      - id: trailing-whitespace
+      - id: end-of-file-fixer
+      - id: check-yaml
+      - id: check-added-large-files
+  - repo: https://github.com/pre-commit/mirrors-eslint
+    rev: v8.56.0
+    hooks:
+      - id: eslint
+  - repo: https://github.com/pre-commit/mirrors-prettier
+    rev: v3.1.0
+    hooks:
+      - id: prettier
+        types_or: [css, javascript, html, json, markdown, yaml]
+        args: [--semi, --single-quote, --trailing-comma, all]
+        verbose: true
diff --git a/.python-version b/.python-version
index bd28b9c5..c8cfe395 100644
--- a/.python-version
+++ b/.python-version
@@ -1 +1 @@
-3.9
+3.10
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 900a425b..c6156e0c 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -14,8 +14,8 @@ Welcome, and thanks in advance for your help!
 
 Pre-Reqs:
 
-- Python 3.9
-- [poetry](https://python-poetry.org/docs/) (if you use multiple versions of Python be sure to install it with python 3.9)
+- Python 3.10
+- [poetry](https://python-poetry.org/docs/) (if you use multiple versions of Python, be sure to install it with Python 3.10)
 - Perl (used in the tests)
 - Node v14 or v16
 
diff --git a/README.md b/README.md
index d9127adb..52a3ecb1 100644
--- a/README.md
+++ b/README.md
@@ -160,21 +160,53 @@ Instead of:
 
 ```toml
 [tool.poetry.dependencies]
-bottle = {git = "git@github.com/bottlepy/bottle.git", tag = "0.12.16"}
+bottle = {git = "git@github.com/bottlepy/bottle.git", tag = "0.12.25"}
 ```
 
 Use:
 
 ```toml
 [tool.poetry.dependencies]
-bottle = {git = "https://git@github.com/bottlepy/bottle.git", tag = "0.12.16"}
+bottle = {git = "https://git@github.com/bottlepy/bottle.git", tag = "0.12.25"}
 ```
 
 Or, if you have an SSH key configured:
 
 ```toml
 [tool.poetry.dependencies]
-bottle = {git = "ssh://git@github.com/bottlepy/bottle.git", tag = "0.12.16"}
+bottle = {git = "ssh://git@github.com/bottlepy/bottle.git", tag = "0.12.25"}
+```
+
+## :sparkles::snake::sparkles: PyProject PEP631/621
+
+If you include a `pyproject.toml` that declares its `dependencies` (PEP 621) instead of a `requirements.txt`,
+this plugin will generate a requirements file from it. It is fully compatible with options such as `zip` and
+`dockerizePip`. If you don't want this plugin to generate it for you, set the following option:
+
+```yaml
+custom:
+  pythonRequirements:
+    usePyProject: false
+```
+
+If your `pyproject.toml` defines optional dependency groups (`[project.optional-dependencies]`), they are not installed automatically. To include them in the deployment package, use the `pyprojectWithGroups` option (see the example `pyproject.toml` below):
+
+```yaml
+custom:
+  pythonRequirements:
+    pyprojectWithGroups:
+      - internal_dependencies
+      - lambda_dependencies
+```
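+
+For reference, a minimal PEP 621 `pyproject.toml` that works with these options might look like the sketch below (package names and group names are illustrative only; the plugin reads `[project].dependencies` and, for `pyprojectWithGroups`, the matching `[project.optional-dependencies]` tables):
+
+```toml
+[project]
+name = "my-service"
+version = "0.1.0"
+dependencies = [
+  "flask",
+  "boto3",
+]
+
+[project.optional-dependencies]
+internal_dependencies = ["requests"]
+lambda_dependencies = ["httpx"]
+```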
+
+## :sparkles::snake::sparkles: Custom Python Package installation
+
+If your functions rely on your code being installed as a Python package, use `installFolderAsPackage`. Ensure your code is structured as an installable Python package; otherwise it will not be installed correctly (the service folder is installed via `pip install .`).
+
+```yaml
+custom:
+  pythonRequirements:
+    installFolderAsPackage: true
 ```
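+
+For `pip install .` to succeed, the service folder needs standard packaging metadata, e.g. a `pyproject.toml` with a `[project]` table and (optionally) an explicit build backend. A minimal sketch, with illustrative names:
+
+```toml
+[build-system]
+requires = ["setuptools"]
+build-backend = "setuptools.build_meta"
+
+[project]
+name = "my-service"
+version = "0.1.0"
+```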
 
 ## Dealing with Lambda's size limitations
@@ -285,7 +317,7 @@ custom:
       name: ${self:provider.stage}-layerName
       description: Python requirements lambda layer
       compatibleRuntimes:
-        - python3.7
+        - python3.10
       licenseInfo: GPLv3
       allowedAccounts:
         - '*'
@@ -515,7 +547,7 @@ For usage of `dockerizePip` on Windows do Step 1 only if running serverless on w
 Some Python packages require extra OS dependencies to build successfully. To deal with this, replace the default image with a `Dockerfile` like:
 
 ```dockerfile
-FROM public.ecr.aws/sam/build-python3.9
+FROM public.ecr.aws/sam/build-python3.10
 
 # Install your dependencies
 RUN yum -y install mysql-devel
@@ -602,7 +634,7 @@ To handle native dependencies, it's recommended to use the Docker builder with t
 custom:
   pythonRequirements:
     # Can use any Python version supported by Scaleway
-    dockerImage: rg.fr-par.scw.cloud/scwfunctionsruntimes-public/python-dep:3.11
+    dockerImage: rg.fr-par.scw.cloud/scwfunctionsruntimes-public/python-dep:3.10
 ```
 
 ## Contributors
@@ -635,6 +667,8 @@ custom:
   - [@drice](https://github.com/drice)
   - [@ofercaspi](https://github.com/ofercaspi)
   - [@tpansino](https://github.com/tpansino)
+- PyProject PEP631/621
+  - [@jax-b](https://github.com/jax-b)
 - [@david-mk-lawrence](https://github.com/david-mk-lawrence) - added Lambda Layer support
 - [@bryantbriggs](https://github.com/bryantbiggs) - Fixing CI/CD
 - [@jacksgt](https://github.com/jacksgt) - Fixing pip issues
diff --git a/example/serverless.yml b/example/serverless.yml
index e5c4c924..3e56a2fe 100644
--- a/example/serverless.yml
+++ b/example/serverless.yml
@@ -2,7 +2,7 @@ service: sls-py-req-test
 
 provider:
   name: aws
-  runtime: python3.9
+  runtime: python3.10
 
 plugins:
   - serverless-python-requirements
diff --git a/example_native_deps/requirements.txt b/example_native_deps/requirements.txt
index cceb4cd4..6c17c191 100644
--- a/example_native_deps/requirements.txt
+++ b/example_native_deps/requirements.txt
@@ -1,3 +1,3 @@
 numpy==1.13.3
 scipy==1.0.0
-sklearn==0.0
\ No newline at end of file
+sklearn==0.0
diff --git a/example_native_deps/serverless.yml b/example_native_deps/serverless.yml
index cfbd4913..bb20b632 100644
--- a/example_native_deps/serverless.yml
+++ b/example_native_deps/serverless.yml
@@ -2,7 +2,7 @@ service: sls-py-req-test
 
 provider:
   name: aws
-  runtime: python3.9
+  runtime: python3.10
 
 plugins:
   - serverless-python-requirements
diff --git a/index.js b/index.js
index 81e50981..70be82c5 100644
--- a/index.js
+++ b/index.js
@@ -61,10 +61,13 @@ class ServerlessPythonRequirements {
         poetryWithGroups: [],
         poetryWithoutGroups: [],
         poetryOnlyGroups: [],
+        usePyProject: true,
+        pyprojectWithGroups: [],
+        installFolderAsPackage: false,
       },
       (this.serverless.service.custom &&
         this.serverless.service.custom.pythonRequirements) ||
-        {}
+        {},
     );
     if (
       options.pythonBin === this.serverless.service.provider.runtime &&
@@ -92,11 +95,11 @@ class ServerlessPythonRequirements {
       if (!this.warningLogged) {
         if (this.log) {
           this.log.warning(
-            'You provided a docker related option but dockerizePip is set to false.'
+            'You provided a docker related option but dockerizePip is set to false.',
           );
         } else {
           this.serverless.cli.log(
-            'WARNING: You provided a docker related option but dockerizePip is set to false.'
+            'WARNING: You provided a docker related option but dockerizePip is set to false.',
           );
         }
         this.warningLogged = true;
@@ -104,7 +107,7 @@ class ServerlessPythonRequirements {
     }
     if (options.dockerImage && options.dockerFile) {
       throw new Error(
-        'Python Requirements: you can provide a dockerImage or a dockerFile option, not both.'
+        'Python Requirements: you can provide a dockerImage or a dockerFile option, not both.',
       );
     }
 
@@ -241,8 +244,8 @@ class ServerlessPythonRequirements {
         .then(() =>
           injectAllRequirements.bind(this)(
             arguments[1].functionObj &&
-              arguments[1].functionObj.package.artifact
-          )
+              arguments[1].functionObj.package.artifact,
+          ),
         );
     };
 
diff --git a/lib/clean.js b/lib/clean.js
index 8aaf331e..349d61f4 100644
--- a/lib/clean.js
+++ b/lib/clean.js
@@ -26,8 +26,8 @@ function cleanup() {
 
   return BbPromise.all(
     artifacts.map((artifact) =>
-      fse.removeAsync(path.join(this.servicePath, artifact))
-    )
+      fse.removeAsync(path.join(this.servicePath, artifact)),
+    ),
   );
 }
 
@@ -58,7 +58,7 @@ function cleanupCache() {
         promises.push(fse.removeAsync(file));
       });
     return BbPromise.all(promises).finally(
-      () => cleanupProgress && cleanupProgress.remove()
+      () => cleanupProgress && cleanupProgress.remove(),
     );
   } else {
     if (this.serverless) {
diff --git a/lib/docker.js b/lib/docker.js
index 68cf935b..e7b75c4a 100644
--- a/lib/docker.js
+++ b/lib/docker.js
@@ -20,7 +20,7 @@ async function dockerCommand(options, pluginInstance) {
     ) {
       throw new pluginInstance.serverless.classes.Error(
         'docker not found! Please install it.',
-        'PYTHON_REQUIREMENTS_DOCKER_NOT_FOUND'
+        'PYTHON_REQUIREMENTS_DOCKER_NOT_FOUND',
       );
     }
     throw e;
@@ -42,7 +42,7 @@ async function buildImage(dockerFile, extraArgs, pluginInstance) {
   } else {
     throw new pluginInstance.serverless.classes.Error(
       'dockerRunCmdExtraArgs option must be an array',
-      'PYTHON_REQUIREMENTS_INVALID_DOCKER_EXTRA_ARGS'
+      'PYTHON_REQUIREMENTS_INVALID_DOCKER_EXTRA_ARGS',
     );
   }
 
@@ -72,7 +72,7 @@ function findTestFile(servicePath, pluginInstance) {
   }
   throw new pluginInstance.serverless.classes.Error(
     'Unable to find serverless.{yml|yaml|json} or requirements.txt for getBindPath()',
-    'PYTHON_REQUIREMENTS_MISSING_GET_BIND_PATH_FILE'
+    'PYTHON_REQUIREMENTS_MISSING_GET_BIND_PATH_FILE',
   );
 }
 
diff --git a/lib/inject.js b/lib/inject.js
index 12267376..c10a848b 100644
--- a/lib/inject.js
+++ b/lib/inject.js
@@ -21,7 +21,7 @@ function injectRequirements(
   requirementsPath,
   packagePath,
   injectionRelativePath,
-  options
+  options,
 ) {
   const noDeploy = new Set(options.noDeploy || []);
 
@@ -33,31 +33,31 @@ function injectRequirements(
         glob.sync([path.join(requirementsPath, '**')], {
           mark: true,
           dot: true,
-        })
+        }),
       )
         .map((file) => [
           file,
           path.join(
             injectionRelativePath,
-            path.relative(requirementsPath, file)
+            path.relative(requirementsPath, file),
           ),
         ])
         .filter(
           ([file, relativeFile]) =>
             !file.endsWith('/') &&
             !relativeFile.match(/^__pycache__[\\/]/) &&
-            !noDeploy.has(relativeFile.split(/([-\\/]|\.py$|\.pyc$)/, 1)[0])
+            !noDeploy.has(relativeFile.split(/([-\\/]|\.py$|\.pyc$)/, 1)[0]),
         )
         .map(([file, relativeFile]) =>
-          Promise.all([file, relativeFile, fse.statAsync(file)])
+          Promise.all([file, relativeFile, fse.statAsync(file)]),
         )
         .mapSeries(([file, relativeFile, fileStat]) =>
           zipFile(zip, relativeFile, fse.readFileAsync(file), {
             unixPermissions: fileStat.mode,
             createFolders: false,
-          })
+          }),
         )
-        .then(() => writeZip(zip, packagePath))
+        .then(() => writeZip(zip, packagePath)),
     );
 }
 
@@ -79,8 +79,8 @@ function moveModuleUp(source, target, module) {
         (file) =>
           file.startsWith(module + '/') ||
           file.startsWith('serverless_sdk/') ||
-          file.match(/^s_.*\.py/) !== null
-      )
+          file.match(/^s_.*\.py/) !== null,
+      ),
     )
     .map((srcZipObj) =>
       zipFile(
@@ -88,8 +88,8 @@ function moveModuleUp(source, target, module) {
         srcZipObj.name.startsWith(module + '/')
           ? srcZipObj.name.replace(module + '/', '')
           : srcZipObj.name,
-        srcZipObj.async('nodebuffer')
-      )
+        srcZipObj.async('nodebuffer'),
+      ),
     )
     .then(() => writeZip(targetZip, target));
 }
@@ -123,8 +123,8 @@ async function injectAllRequirements(funcArtifact) {
       await BbPromise.resolve(this.targetFuncs)
         .filter((func) =>
           (func.runtime || this.serverless.service.provider.runtime).match(
-            /^python.*/
-          )
+            /^python.*/,
+          ),
         )
         .map((func) => {
           if (!get(func, 'module')) {
@@ -139,11 +139,11 @@ async function injectAllRequirements(funcArtifact) {
               : funcArtifact;
             const newArtifact = path.join(
               '.serverless',
-              `${func.module}-${func.name}.zip`
+              `${func.module}-${func.name}.zip`,
             );
             func.package.artifact = newArtifact;
             return moveModuleUp(artifact, newArtifact, func.module).then(
-              () => func
+              () => func,
             );
           } else {
             return func;
@@ -156,7 +156,7 @@ async function injectAllRequirements(funcArtifact) {
                 path.join('.serverless', func.module, 'requirements'),
                 func.package.artifact,
                 injectionRelativePath,
-                this.options
+                this.options,
               );
         });
     } else if (!this.options.zip) {
@@ -164,7 +164,7 @@ async function injectAllRequirements(funcArtifact) {
         path.join('.serverless', 'requirements'),
         this.serverless.service.package.artifact || funcArtifact,
         injectionRelativePath,
-        this.options
+        this.options,
       );
     }
   } finally {
diff --git a/lib/layer.js b/lib/layer.js
index 6fe9ca4c..c21aa8f1 100644
--- a/lib/layer.js
+++ b/lib/layer.js
@@ -19,7 +19,7 @@ function zipRequirements() {
     reqChecksum,
     targetZipPath,
     this.options,
-    this.serverless
+    this.serverless,
   );
 
   const promises = [];
@@ -28,12 +28,12 @@ function zipRequirements() {
     if (this.progress && this.log) {
       layerProgress = this.progress.get('python-layer-requirements');
       layerProgress.update(
-        'Using cached Python Requirements Lambda Layer file'
+        'Using cached Python Requirements Lambda Layer file',
       );
       this.log.info('Found cached Python Requirements Lambda Layer file');
     } else {
       this.serverless.cli.log(
-        'Found cached Python Requirements Lambda Layer file'
+        'Found cached Python Requirements Lambda Layer file',
       );
     }
   } else {
@@ -42,8 +42,8 @@ function zipRequirements() {
 
     promises.push(
       addTree(rootZip.folder(runtimepath), src).then(() =>
-        writeZip(rootZip, zipCachePath)
-      )
+        writeZip(rootZip, zipCachePath),
+      ),
     );
   }
   return BbPromise.all(promises).then(() => {
@@ -75,7 +75,7 @@ function createLayers() {
         'Python requirements generated by serverless-python-requirements.',
       compatibleRuntimes: [this.serverless.service.provider.runtime],
     },
-    this.options.layer
+    this.options.layer,
   );
 
   return BbPromise.resolve();
diff --git a/lib/pip.js b/lib/pip.js
index 16a802b0..b9523710 100644
--- a/lib/pip.js
+++ b/lib/pip.js
@@ -7,7 +7,11 @@ const spawn = require('child-process-ext/spawn');
 const { quote } = require('shell-quote');
 const { buildImage, getBindPath, getDockerUid } = require('./docker');
 const { getStripCommand, getStripMode, deleteFiles } = require('./slim');
-const { isPoetryProject, pyprojectTomlToRequirements } = require('./poetry');
+const { isPoetryProject, poetryTomlToRequirements } = require('./poetry');
+const {
+  pyprojectTomlToRequirements,
+  isPyProjectProject,
+} = require('./pyproject');
 const {
   checkForAndDeleteMaxCacheVersions,
   sha256Path,
@@ -57,17 +61,19 @@ function mergeCommands(commands) {
 function generateRequirementsFile(
   requirementsPath,
   targetFile,
-  pluginInstance
+  pluginInstance,
 ) {
   const { serverless, servicePath, options, log } = pluginInstance;
   const modulePath = path.dirname(requirementsPath);
   if (options.usePoetry && isPoetryProject(modulePath)) {
     filterRequirementsFile(targetFile, targetFile, pluginInstance);
     if (log) {
-      log.info(`Parsed requirements.txt from pyproject.toml in ${targetFile}`);
+      log.info(
+        `Parsed requirements.txt from pyproject.toml (poetry) in ${targetFile}`,
+      );
     } else {
       serverless.cli.log(
-        `Parsed requirements.txt from pyproject.toml in ${targetFile}...`
+        `Parsed requirements.txt from pyproject.toml poetry in ${targetFile}...`,
       );
     }
   } else if (
@@ -77,24 +83,33 @@ function generateRequirementsFile(
     filterRequirementsFile(
       path.join(servicePath, '.serverless/requirements.txt'),
       targetFile,
-      pluginInstance
+      pluginInstance,
     );
     if (log) {
       log.info(`Parsed requirements.txt from Pipfile in ${targetFile}`);
     } else {
       serverless.cli.log(
-        `Parsed requirements.txt from Pipfile in ${targetFile}...`
+        `Parsed requirements.txt from Pipfile in ${targetFile}...`,
+      );
+    }
+  } else if (options.usePyProject && isPyProjectProject(modulePath)) {
+    filterRequirementsFile(targetFile, targetFile, pluginInstance);
+    if (log) {
+      log.info(`Parsed requirements.txt from pyproject.toml in ${targetFile}`);
+    } else {
+      serverless.cli.log(
+        `Parsed requirements.txt from pyproject.toml in ${targetFile}...`,
       );
     }
   } else {
     filterRequirementsFile(requirementsPath, targetFile, pluginInstance);
     if (log) {
       log.info(
-        `Generated requirements from ${requirementsPath} in ${targetFile}`
+        `Generated requirements from ${requirementsPath} in ${targetFile}`,
       );
     } else {
       serverless.cli.log(
-        `Generated requirements from ${requirementsPath} in ${targetFile}...`
+        `Generated requirements from ${requirementsPath} in ${targetFile}...`,
       );
     }
   }
@@ -115,7 +130,7 @@ async function pipAcceptsSystem(pythonBin, pluginInstance) {
     ) {
       throw new pluginInstance.serverless.classes.Error(
         `${pythonBin} not found! Install it according to the poetry docs.`,
-        'PYTHON_REQUIREMENTS_PYTHON_NOT_FOUND'
+        'PYTHON_REQUIREMENTS_PYTHON_NOT_FOUND',
       );
     }
     throw e;
@@ -141,7 +156,7 @@ async function installRequirements(targetFolder, pluginInstance, funcOptions) {
     installProgress.update('Installing requirements');
   } else {
     serverless.cli.log(
-      `Installing requirements from ${targetRequirementsTxt} ...`
+      `Installing requirements from ${targetRequirementsTxt} ...`,
     );
   }
 
@@ -166,7 +181,7 @@ async function installRequirements(targetFolder, pluginInstance, funcOptions) {
       if (options.dockerizePip) {
         throw new pluginInstance.serverless.classes.Error(
           'You cannot use --cache-dir with Docker any more, please use the new option useDownloadCache instead. Please see: https://github.com/UnitedIncome/serverless-python-requirements#caching for more details.',
-          'PYTHON_REQUIREMENTS_CACHE_DIR_DOCKER_INVALID'
+          'PYTHON_REQUIREMENTS_CACHE_DIR_DOCKER_INVALID',
         );
       } else {
         if (log) {
@@ -174,20 +189,20 @@ async function installRequirements(targetFolder, pluginInstance, funcOptions) {
             'You are using a deprecated --cache-dir inside\n' +
               '            your pipCmdExtraArgs which may not work properly, please use the\n' +
               '            useDownloadCache option instead.  Please see: \n' +
-              '            https://github.com/UnitedIncome/serverless-python-requirements#caching'
+              '            https://github.com/UnitedIncome/serverless-python-requirements#caching',
           );
         } else {
           serverless.cli.log(
-            '=================================================='
+            '==================================================',
           );
           serverless.cli.log(
             'Warning: You are using a deprecated --cache-dir inside\n' +
               '            your pipCmdExtraArgs which may not work properly, please use the\n' +
               '            useDownloadCache option instead.  Please see: \n' +
-              '            https://github.com/UnitedIncome/serverless-python-requirements#caching'
+              '            https://github.com/UnitedIncome/serverless-python-requirements#caching',
           );
           serverless.cli.log(
-            '=================================================='
+            '==================================================',
           );
         }
       }
@@ -199,19 +214,19 @@ async function installRequirements(targetFolder, pluginInstance, funcOptions) {
         '-t',
         dockerPathForWin(targetFolder),
         '-r',
-        dockerPathForWin(targetRequirementsTxt)
+        dockerPathForWin(targetRequirementsTxt),
       );
       // If we want a download cache...
       if (options.useDownloadCache) {
         const downloadCacheDir = path.join(
           getUserCachePath(options),
-          'downloadCacheslspyc'
+          'downloadCacheslspyc',
         );
         if (log) {
           log.info(`Using download cache directory ${downloadCacheDir}`);
         } else {
           serverless.cli.log(
-            `Using download cache directory ${downloadCacheDir}`
+            `Using download cache directory ${downloadCacheDir}`,
           );
         }
         fse.ensureDirSync(downloadCacheDir);
@@ -234,21 +249,21 @@ async function installRequirements(targetFolder, pluginInstance, funcOptions) {
         let buildDockerImageProgress;
         if (progress) {
           buildDockerImageProgress = progress.get(
-            'python-install-build-docker'
+            'python-install-build-docker',
           );
           buildDockerImageProgress.update(
-            `Building custom docker image from ${options.dockerFile}`
+            `Building custom docker image from ${options.dockerFile}`,
           );
         } else {
           serverless.cli.log(
-            `Building custom docker image from ${options.dockerFile}...`
+            `Building custom docker image from ${options.dockerFile}...`,
           );
         }
         try {
           dockerImage = await buildImage(
             options.dockerFile,
             options.dockerBuildCmdExtraArgs,
-            pluginInstance
+            pluginInstance,
           );
         } finally {
           buildDockerImageProgress && buildDockerImageProgress.remove();
@@ -264,7 +279,7 @@ async function installRequirements(targetFolder, pluginInstance, funcOptions) {
 
       // Prepare bind path depending on os platform
       const bindPath = dockerPathForWin(
-        await getBindPath(targetFolder, pluginInstance)
+        await getBindPath(targetFolder, pluginInstance),
       );
 
       dockerCmd.push('docker', 'run', '--rm', '-v', `${bindPath}:/var/task:z`);
@@ -282,7 +297,7 @@ async function installRequirements(targetFolder, pluginInstance, funcOptions) {
           '-v',
           `${process.env.SSH_AUTH_SOCK}:/tmp/ssh_sock:z`,
           '-e',
-          'SSH_AUTH_SOCK=/tmp/ssh_sock'
+          'SSH_AUTH_SOCK=/tmp/ssh_sock',
         );
       }
 
@@ -291,13 +306,13 @@ async function installRequirements(targetFolder, pluginInstance, funcOptions) {
       if (options.useDownloadCache) {
         const downloadCacheDir = path.join(
           getUserCachePath(options),
-          'downloadCacheslspyc'
+          'downloadCacheslspyc',
         );
         if (log) {
           log.info(`Using download cache directory ${downloadCacheDir}`);
         } else {
           serverless.cli.log(
-            `Using download cache directory ${downloadCacheDir}`
+            `Using download cache directory ${downloadCacheDir}`,
           );
         }
         fse.ensureDirSync(downloadCacheDir);
@@ -305,7 +320,7 @@ async function installRequirements(targetFolder, pluginInstance, funcOptions) {
         // Ugh, this is so ugly, but someone has to fix getBindPath in some other way (eg: make it use
         // its own temp file)
         fse.closeSync(
-          fse.openSync(path.join(downloadCacheDir, 'requirements.txt'), 'w')
+          fse.openSync(path.join(downloadCacheDir, 'requirements.txt'), 'w'),
         );
         const windowsized = await getBindPath(downloadCacheDir, pluginInstance);
         // And now push it to a volume mount and to pip...
@@ -360,7 +375,7 @@ async function installRequirements(targetFolder, pluginInstance, funcOptions) {
       } else {
         throw new pluginInstance.serverless.classes.Error(
           'dockerRunCmdExtraArgs option must be an array',
-          'PYTHON_REQUIREMENTS_INVALID_DOCKER_EXTRA_ARGS'
+          'PYTHON_REQUIREMENTS_INVALID_DOCKER_EXTRA_ARGS',
         );
       }
 
@@ -410,7 +425,7 @@ async function installRequirements(targetFolder, pluginInstance, funcOptions) {
               : 'Please install it';
           throw new pluginInstance.serverless.classes.Error(
             `${cmd} not found! ${advice}`,
-            'PYTHON_REQUIREMENTS_COMMAND_NOT_FOUND'
+            'PYTHON_REQUIREMENTS_COMMAND_NOT_FOUND',
           );
         }
 
@@ -419,7 +434,7 @@ async function installRequirements(targetFolder, pluginInstance, funcOptions) {
             `Running "${cmd} ${args.join(' ')}" failed with: "${e.stderrBuffer
               .toString()
               .trim()}"`,
-            'PYTHON_REQUIREMENTS_DOCKER_COMMAND_FAILED'
+            'PYTHON_REQUIREMENTS_DOCKER_COMMAND_FAILED',
           );
         }
 
@@ -513,7 +528,7 @@ function filterRequirementsFile(source, target, { options, serverless, log }) {
           log.warning(`Stripping -e flag from requirement ${req}`);
         } else {
           serverless.cli.log(
-            `Warning: Stripping -e flag from requirement ${req}`
+            `Warning: Stripping -e flag from requirement ${req}`,
           );
         }
       }
@@ -533,6 +548,16 @@ function filterRequirementsFile(source, target, { options, serverless, log }) {
       filteredRequirements.unshift(item);
     }
   }
+
+  if (options.installFolderAsPackage) {
+    // Install the service folder itself as a package (adds `.` to the
+    // generated requirements file, i.e. `pip install .`)
+    filteredRequirements.push('.');
+  }
+
   fse.writeFileSync(target, filteredRequirements.join('\n') + '\n');
 }
 
@@ -549,11 +574,11 @@ function copyVendors(vendorFolder, targetFolder, { serverless, log }) {
 
   if (log) {
     log.info(
-      `Copying vendor libraries from ${vendorFolder} to ${targetFolder}`
+      `Copying vendor libraries from ${vendorFolder} to ${targetFolder}`,
     );
   } else {
     serverless.cli.log(
-      `Copying vendor libraries from ${vendorFolder} to ${targetFolder}...`
+      `Copying vendor libraries from ${vendorFolder} to ${targetFolder}...`,
     );
   }
 
@@ -586,6 +611,10 @@ function requirementsFileExists(servicePath, options, fileName) {
     return true;
   }
 
+  if (options.usePyProject && isPyProjectProject(path.dirname(fileName))) {
+    return true;
+  }
+
   return false;
 }
 
@@ -603,12 +632,13 @@ function requirementsFileExists(servicePath, options, fileName) {
 async function installRequirementsIfNeeded(
   modulePath,
   funcOptions,
-  pluginInstance
+  pluginInstance,
 ) {
   const { servicePath, options, serverless } = pluginInstance;
   // Our source requirements, under our service path, and our module path (if specified)
   const fileName = path.join(servicePath, modulePath, options.fileName);
 
+  await poetryTomlToRequirements(modulePath, pluginInstance);
   await pyprojectTomlToRequirements(modulePath, pluginInstance);
 
   // Skip requirements generation, if requirements file doesn't exist
@@ -622,7 +652,7 @@ async function installRequirementsIfNeeded(
     requirementsTxtDirectory = path.join(
       servicePath,
       '.serverless',
-      modulePath
+      modulePath,
     );
   } else {
     requirementsTxtDirectory = path.join(servicePath, '.serverless');
@@ -636,11 +666,11 @@ async function installRequirementsIfNeeded(
   if (!fse.existsSync(slsReqsTxt) || fse.statSync(slsReqsTxt).size == 0) {
     if (pluginInstance.log) {
       pluginInstance.log.info(
-        `Skipping empty output requirements.txt file from ${slsReqsTxt}`
+        `Skipping empty output requirements.txt file from ${slsReqsTxt}`,
       );
     } else {
       serverless.cli.log(
-        `Skipping empty output requirements.txt file from ${slsReqsTxt}`
+        `Skipping empty output requirements.txt file from ${slsReqsTxt}`,
       );
     }
     return false;
@@ -654,7 +684,7 @@ async function installRequirementsIfNeeded(
     reqChecksum,
     requirementsTxtDirectory,
     options,
-    serverless
+    serverless,
   );
 
   // Check if our static cache is present and is valid
@@ -665,11 +695,11 @@ async function installRequirementsIfNeeded(
     ) {
       if (pluginInstance.log) {
         pluginInstance.log.info(
-          `Using static cache of requirements found at ${workingReqsFolder}`
+          `Using static cache of requirements found at ${workingReqsFolder}`,
         );
       } else {
         serverless.cli.log(
-          `Using static cache of requirements found at ${workingReqsFolder} ...`
+          `Using static cache of requirements found at ${workingReqsFolder} ...`,
         );
       }
       // We'll "touch" the folder, as to bring it to the start of the FIFO cache
@@ -705,7 +735,10 @@ async function installRequirementsIfNeeded(
   // Then touch our ".completed_requirements" file so we know we can use this for static cache
   if (options.useStaticCache) {
     fse.closeSync(
-      fse.openSync(path.join(workingReqsFolder, '.completed_requirements'), 'w')
+      fse.openSync(
+        path.join(workingReqsFolder, '.completed_requirements'),
+        'w',
+      ),
     );
   }
   return workingReqsFolder;
@@ -725,8 +758,8 @@ async function installAllRequirements() {
     let doneModules = [];
     const filteredFuncs = this.targetFuncs.filter((func) =>
       (func.runtime || this.serverless.service.provider.runtime).match(
-        /^python.*/
-      )
+        /^python.*/,
+      ),
     );
 
     for (const f of filteredFuncs) {
@@ -739,14 +772,14 @@ async function installAllRequirements() {
         const reqsInstalledAt = await installRequirementsIfNeeded(
           f.module,
           f,
-          this
+          this,
         );
         // Add modulePath into .serverless for each module so it's easier for injecting and for users to see where reqs are
         let modulePath = path.join(
           this.servicePath,
           '.serverless',
           `${f.module}`,
-          'requirements'
+          'requirements',
         );
         // Only do if we didn't already do it
         if (
@@ -775,7 +808,7 @@ async function installAllRequirements() {
     let symlinkPath = path.join(
       this.servicePath,
       '.serverless',
-      `requirements`
+      `requirements`,
     );
     // Only do if we didn't already do it
     if (
diff --git a/lib/pipenv.js b/lib/pipenv.js
index 1099b651..f5d14e42 100644
--- a/lib/pipenv.js
+++ b/lib/pipenv.js
@@ -22,7 +22,7 @@ async function getPipenvVersion() {
     } else {
       throw new this.serverless.classes.Error(
         `Unable to parse pipenv version!`,
-        'PYTHON_REQUIREMENTS_PIPENV_VERSION_ERROR'
+        'PYTHON_REQUIREMENTS_PIPENV_VERSION_ERROR',
       );
     }
   } catch (e) {
@@ -32,7 +32,7 @@ async function getPipenvVersion() {
     if (stderrBufferContent.includes('command not found')) {
       throw new this.serverless.classes.Error(
         `pipenv not found! Install it according to the pipenv docs.`,
-        'PYTHON_REQUIREMENTS_PIPENV_NOT_FOUND'
+        'PYTHON_REQUIREMENTS_PIPENV_NOT_FOUND',
       );
     } else {
       throw e;
@@ -54,10 +54,10 @@ async function pipfileToRequirements() {
   let generateRequirementsProgress;
   if (this.progress && this.log) {
     generateRequirementsProgress = this.progress.get(
-      'python-generate-requirements-pipfile'
+      'python-generate-requirements-pipfile',
     );
     generateRequirementsProgress.update(
-      'Generating requirements.txt from Pipfile'
+      'Generating requirements.txt from Pipfile',
     );
     this.log.info('Generating requirements.txt from Pipfile');
   } else {
@@ -90,11 +90,11 @@ async function pipfileToRequirements() {
           // No previous Pipfile.lock, we will try to generate it here
           if (this.log) {
             this.log.warning(
-              'No Pipfile.lock found! Review https://pipenv.pypa.io/en/latest/pipfile/ for recommendations.'
+              'No Pipfile.lock found! Review https://pipenv.pypa.io/en/latest/pipfile/ for recommendations.',
             );
           } else {
             this.serverless.cli.log(
-              'WARNING: No Pipfile.lock found! Review https://pipenv.pypa.io/en/latest/pipfile/ for recommendations.'
+              'WARNING: No Pipfile.lock found! Review https://pipenv.pypa.io/en/latest/pipfile/ for recommendations.',
             );
           }
           await spawn('pipenv', ['lock'], {
@@ -114,14 +114,14 @@ async function pipfileToRequirements() {
         ['lock', '--requirements', '--keep-outdated'],
         {
           cwd: this.servicePath,
-        }
+        },
       );
     }
 
     fse.ensureDirSync(path.join(this.servicePath, '.serverless'));
     fse.writeFileSync(
       path.join(this.servicePath, '.serverless/requirements.txt'),
-      removeEditableFlagFromRequirementsString(res.stdoutBuffer)
+      removeEditableFlagFromRequirementsString(res.stdoutBuffer),
     );
   } finally {
     generateRequirementsProgress && generateRequirementsProgress.remove();
diff --git a/lib/poetry.js b/lib/poetry.js
index 17e3268f..3028daf0 100644
--- a/lib/poetry.js
+++ b/lib/poetry.js
@@ -8,7 +8,7 @@ const tomlParse = require('@iarna/toml/parse-string');
 /**
  * poetry install
  */
-async function pyprojectTomlToRequirements(modulePath, pluginInstance) {
+async function poetryTomlToRequirements(modulePath, pluginInstance) {
   const { serverless, servicePath, options, log, progress } = pluginInstance;
 
   const moduleProjectPath = path.join(servicePath, modulePath);
@@ -19,7 +19,7 @@ async function pyprojectTomlToRequirements(modulePath, pluginInstance) {
   let generateRequirementsProgress;
   if (progress && log) {
     generateRequirementsProgress = progress.get(
-      'python-generate-requirements-toml'
+      'python-generate-requirements-toml',
     );
   }
 
@@ -39,7 +39,7 @@ async function pyprojectTomlToRequirements(modulePath, pluginInstance) {
       throw new serverless.classes.Error(
         'poetry.lock file not found - set requirePoetryLockFile to false to ' +
           'disable this error',
-        'MISSING_REQUIRED_POETRY_LOCK'
+        'MISSING_REQUIRED_POETRY_LOCK',
       );
     }
     emitMsg('Generating poetry.lock and requirements.txt from pyproject.toml');
@@ -69,7 +69,7 @@ async function pyprojectTomlToRequirements(modulePath, pluginInstance) {
         ],
         {
           cwd: moduleProjectPath,
-        }
+        },
       );
     } catch (e) {
       if (
@@ -78,7 +78,7 @@ async function pyprojectTomlToRequirements(modulePath, pluginInstance) {
       ) {
         throw new serverless.classes.Error(
           `poetry not found! Install it according to the poetry docs.`,
-          'PYTHON_REQUIREMENTS_POETRY_NOT_FOUND'
+          'PYTHON_REQUIREMENTS_POETRY_NOT_FOUND',
         );
       }
       throw e;
@@ -95,12 +95,12 @@ async function pyprojectTomlToRequirements(modulePath, pluginInstance) {
         log.info('The generated file contains -e flags, removing them');
       } else {
         serverless.cli.log(
-          'The generated file contains -e flags, removing them...'
+          'The generated file contains -e flags, removing them...',
         );
       }
       fse.writeFileSync(
         sourceRequirements,
-        requirementsContents.replace(editableFlag, '')
+        requirementsContents.replace(editableFlag, ''),
       );
     }
 
@@ -108,7 +108,7 @@ async function pyprojectTomlToRequirements(modulePath, pluginInstance) {
     fse.moveSync(
       sourceRequirements,
       path.join(servicePath, '.serverless', modulePath, 'requirements.txt'),
-      { overwrite: true }
+      { overwrite: true },
     );
   } finally {
     generateRequirementsProgress && generateRequirementsProgress.remove();
@@ -140,4 +140,4 @@ function isPoetryProject(servicePath) {
   return false;
 }
 
-module.exports = { pyprojectTomlToRequirements, isPoetryProject };
+module.exports = { poetryTomlToRequirements, isPoetryProject };
diff --git a/lib/pyproject.js b/lib/pyproject.js
new file mode 100644
index 00000000..41729df5
--- /dev/null
+++ b/lib/pyproject.js
@@ -0,0 +1,75 @@
+const fs = require('fs');
+const fse = require('fs-extra');
+const path = require('path');
+
+const tomlParse = require('@iarna/toml/parse-string');
+
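+/**
+ * Generate .serverless/requirements.txt from the PEP 621 `[project]`
+ * dependencies in pyproject.toml, optionally including the optional
+ * dependency groups listed in the `pyprojectWithGroups` option.
+ */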
+function pyprojectTomlToRequirements(modulePath, pluginInstance) {
+  const { serverless, servicePath, options, log, progress } = pluginInstance;
+
+  const moduleProjectPath = path.join(servicePath, modulePath);
+  if (!options.usePyProject || !isPyProjectProject(moduleProjectPath)) {
+    return;
+  }
+
+  let generateRequirementsProgress;
+  if (progress && log) {
+    generateRequirementsProgress = progress.get(
+      'python-generate-requirements-toml',
+    );
+    generateRequirementsProgress.update(
+      'Generating requirements.txt from pyproject.toml',
+    );
+    log.info('Generating requirements.txt from pyproject.toml');
+  } else {
+    serverless.cli.log('Generating requirements.txt from pyproject.toml');
+  }
+
+  try {
+    const pyprojectPath = path.join(servicePath, 'pyproject.toml');
+    const pyprojectToml = fs.readFileSync(pyprojectPath);
+    const pyproject = tomlParse(pyprojectToml);
+
+    const dependencies = pyproject['project']['dependencies'];
+
+    if (options.pyprojectWithGroups) {
+      for (const optionalDep of options.pyprojectWithGroups) {
+        try {
+          dependencies.push(
+            ...pyproject['project']['optional-dependencies'][optionalDep],
+          );
+        } catch (e) {
+          if (log) {
+            log.warning(
+              `Optional dependency group '${optionalDep}' not found in pyproject.toml`,
+            );
+          } else {
+            serverless.cli.log(
+              `WARNING: Optional dependency group '${optionalDep}' not found in pyproject.toml`,
+            );
+          }
+        }
+      }
+    }
+
+    fse.ensureDirSync(path.join(servicePath, '.serverless'));
+    fse.writeFileSync(
+      path.join(servicePath, '.serverless/requirements.txt'),
+      dependencies.join('\n'),
+    );
+  } finally {
+    generateRequirementsProgress && generateRequirementsProgress.remove();
+  }
+}
+
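+/**
+ * A project is treated as a PEP 621 pyproject project when its
+ * pyproject.toml declares a `[project]` table with a `dependencies` list.
+ */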
+function isPyProjectProject(servicePath) {
+  const pyprojectPath = path.join(servicePath, 'pyproject.toml');
+
+  if (!fse.existsSync(pyprojectPath)) {
+    return false;
+  }
+
+  const pyprojectToml = fs.readFileSync(pyprojectPath);
+  const pyproject = tomlParse(pyprojectToml);
+
+  if (pyproject['project'] && pyproject['project']['dependencies']) {
+    return true;
+  }
+
+  return false;
+}
+
+module.exports = { pyprojectTomlToRequirements, isPyProjectProject };
diff --git a/lib/shared.js b/lib/shared.js
index bebb3f09..d088d4dc 100644
--- a/lib/shared.js
+++ b/lib/shared.js
@@ -22,7 +22,7 @@ function checkForAndDeleteMaxCacheVersions({ serverless, options, log }) {
     // Get the list of our cache files
     const files = glob.sync(
       [path.join(getUserCachePath(options), '*_slspyc/')],
-      { mark: true }
+      { mark: true },
     );
     // Check if we have too many
     if (files.length >= options.staticCacheMaxVersions) {
@@ -46,11 +46,11 @@ function checkForAndDeleteMaxCacheVersions({ serverless, options, log }) {
       // Log the number of cache files flushed
       if (log) {
         log.info(
-          `Removed ${items} items from cache because of staticCacheMaxVersions`
+          `Removed ${items} items from cache because of staticCacheMaxVersions`,
         );
       } else {
         serverless.cli.log(
-          `Removed ${items} items from cache because of staticCacheMaxVersions`
+          `Removed ${items} items from cache because of staticCacheMaxVersions`,
         );
       }
     }
@@ -69,7 +69,7 @@ function getRequirementsWorkingPath(
   subfolder,
   requirementsTxtDirectory,
   options,
-  serverless
+  serverless,
 ) {
   // If we want to use the static cache
   if (options && options.useStaticCache) {
diff --git a/lib/zip.js b/lib/zip.js
index 3c21bbbf..741abeba 100644
--- a/lib/zip.js
+++ b/lib/zip.js
@@ -34,13 +34,13 @@ function addVendorHelper() {
             this.log.info(`Adding Python requirements helper to ${f.module}`);
           } else {
             this.serverless.cli.log(
-              `Adding Python requirements helper to ${f.module}...`
+              `Adding Python requirements helper to ${f.module}...`,
             );
           }
 
           return fse.copyAsync(
             path.resolve(__dirname, '../unzip_requirements.py'),
-            path.join(this.servicePath, f.module, 'unzip_requirements.py')
+            path.join(this.servicePath, f.module, 'unzip_requirements.py'),
           );
         });
     } else {
@@ -58,7 +58,7 @@ function addVendorHelper() {
 
       return fse.copyAsync(
         path.resolve(__dirname, '../unzip_requirements.py'),
-        path.join(this.servicePath, 'unzip_requirements.py')
+        path.join(this.servicePath, 'unzip_requirements.py'),
       );
     }
   }
@@ -82,15 +82,15 @@ function removeVendorHelper() {
         .map((f) => {
           if (this.log) {
             this.log.info(
-              `Removing Python requirements helper from ${f.module}`
+              `Removing Python requirements helper from ${f.module}`,
             );
           } else {
             this.serverless.cli.log(
-              `Removing Python requirements helper from ${f.module}...`
+              `Removing Python requirements helper from ${f.module}...`,
             );
           }
           return fse.removeAsync(
-            path.join(this.servicePath, f.module, 'unzip_requirements.py')
+            path.join(this.servicePath, f.module, 'unzip_requirements.py'),
           );
         });
     } else {
@@ -100,7 +100,7 @@ function removeVendorHelper() {
         this.serverless.cli.log('Removing Python requirements helper...');
       }
       return fse.removeAsync(
-        path.join(this.servicePath, 'unzip_requirements.py')
+        path.join(this.servicePath, 'unzip_requirements.py'),
       );
     }
   }
@@ -130,15 +130,15 @@ function packRequirements() {
           let packProgress;
           if (this.progress && this.log) {
             packProgress = this.progress.get(
-              `python-pack-requirements-${f.module}`
+              `python-pack-requirements-${f.module}`,
             );
             packProgress.update(
-              `Zipping required Python packages for ${f.module}`
+              `Zipping required Python packages for ${f.module}`,
             );
             this.log.info(`Zipping required Python packages for ${f.module}`);
           } else {
             this.serverless.cli.log(
-              `Zipping required Python packages for ${f.module}...`
+              `Zipping required Python packages for ${f.module}...`,
             );
           }
           f.package.patterns.push(`${f.module}/.requirements.zip`);
@@ -156,7 +156,7 @@ function packRequirements() {
       this.serverless.service.package.patterns.push('.requirements.zip');
       return addTree(new JSZip(), '.serverless/requirements')
         .then((zip) =>
-          writeZip(zip, path.join(this.servicePath, '.requirements.zip'))
+          writeZip(zip, path.join(this.servicePath, '.requirements.zip')),
         )
         .finally(() => packProgress && packProgress.remove());
     }
diff --git a/lib/zipTree.js b/lib/zipTree.js
index 1654f665..ac07b1f1 100644
--- a/lib/zipTree.js
+++ b/lib/zipTree.js
@@ -50,7 +50,7 @@ function writeZip(zip, targetPath) {
     zip
       .generateNodeStream(opts)
       .pipe(fse.createWriteStream(targetPath))
-      .on('finish', resolve)
+      .on('finish', resolve),
   ).then(() => null);
 }
 
@@ -74,9 +74,9 @@ function zipFile(zip, zipPath, bufferPromise, fileOpts) {
             // necessary to get the same hash when zipping the same content
             date: new Date(0),
           },
-          fileOpts
-        )
-      )
+          fileOpts,
+        ),
+      ),
     )
     .then(() => zip);
 }
diff --git a/package.json b/package.json
index c420e4d6..0bde328b 100644
--- a/package.json
+++ b/package.json
@@ -128,6 +128,7 @@
   },
   "prettier": {
     "semi": true,
-    "singleQuote": true
+    "singleQuote": true,
+    "trailingComma": "all"
   }
 }
diff --git a/test.js b/test.js
index b97f3fdc..7f17a52b 100644
--- a/test.js
+++ b/test.js
@@ -26,12 +26,12 @@ const mkCommand =
     options['env'] = Object.assign(
       { SLS_DEBUG: 'true' },
       process.env,
-      options['env']
+      options['env'],
     );
     const { error, stdout, stderr, status } = crossSpawn.sync(
       cmd,
       args,
-      options
+      options,
     );
     if (error && !options['noThrow']) {
       console.error(`Error running: ${quote([cmd, ...args])}`); // eslint-disable-line no-console
@@ -41,7 +41,7 @@ const mkCommand =
       console.error('STDOUT: ', stdout.toString()); // eslint-disable-line no-console
       console.error('STDERR: ', stderr.toString()); // eslint-disable-line no-console
       throw new Error(
-        `${quote([cmd, ...args])} failed with status code ${status}`
+        `${quote([cmd, ...args])} failed with status code ${status}`,
       );
     }
     return stdout && stdout.toString().trim();
@@ -84,7 +84,7 @@ const teardown = () => {
         git(['checkout', 'serverless.yml']);
       } catch (err) {
         console.error(
-          `At ${cwd} failed to checkout 'serverless.yml' with ${err}.`
+          `At ${cwd} failed to checkout 'serverless.yml' with ${err}.`,
         );
         throw err;
       }
@@ -130,7 +130,7 @@ const availablePythons = (() => {
   const mapping = {};
   if (process.env.USE_PYTHON) {
     binaries.push(
-      ...process.env.USE_PYTHON.split(',').map((v) => v.toString().trim())
+      ...process.env.USE_PYTHON.split(',').map((v) => v.toString().trim()),
     );
   } else {
     // For running outside of CI
@@ -214,13 +214,13 @@ test(
     });
     t.true(
       stdout.includes(
-        `-v ${__dirname}${sep}tests${sep}base${sep}custom_ssh:/root/.ssh/custom_ssh:z`
+        `-v ${__dirname}${sep}tests${sep}base${sep}custom_ssh:/root/.ssh/custom_ssh:z`,
       ),
-      'docker command properly resolved'
+      'docker command properly resolved',
     );
     t.end();
   },
-  { skip: !canUseDocker() || brokenOn('win32') }
+  { skip: !canUseDocker() || brokenOn('win32') },
 );
 
 test('default pythonBin can package flask with default options', async (t) => {
@@ -234,7 +234,7 @@ test('default pythonBin can package flask with default options', async (t) => {
   t.end();
 });
 
-test('py3.9 packages have the same hash', async (t) => {
+test('py3.10 packages have the same hash', async (t) => {
   process.chdir('tests/base');
   const path = npm(['pack', '../..']);
   npm(['i', path]);
@@ -244,12 +244,12 @@ test('py3.9 packages have the same hash', async (t) => {
   t.equal(
     sha256File('.serverless/sls-py-req-test.zip'),
     fileHash,
-    'packages have the same hash'
+    'packages have the same hash',
   );
   t.end();
 });
 
-test('py3.9 can package flask with default options', async (t) => {
+test('py3.10 can package flask with default options', async (t) => {
   process.chdir('tests/base');
   const path = npm(['pack', '../..']);
   npm(['i', path]);
@@ -261,7 +261,7 @@ test('py3.9 can package flask with default options', async (t) => {
 });
 
 test(
-  'py3.9 can package flask with hashes',
+  'py3.10 can package flask with hashes',
   async (t) => {
     process.chdir('tests/base');
     const path = npm(['pack', '../..']);
@@ -276,10 +276,10 @@ test(
     t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged');
     t.end();
   },
-  { skip: brokenOn('win32') }
+  { skip: brokenOn('win32') },
 );
 
-test('py3.9 can package flask with nested', async (t) => {
+test('py3.10 can package flask with nested', async (t) => {
   process.chdir('tests/base');
   const path = npm(['pack', '../..']);
   npm(['i', path]);
@@ -295,7 +295,7 @@ test('py3.9 can package flask with nested', async (t) => {
   t.end();
 });
 
-test('py3.9 can package flask with zip option', async (t) => {
+test('py3.10 can package flask with zip option', async (t) => {
   process.chdir('tests/base');
   const path = npm(['pack', '../..']);
   npm(['i', path]);
@@ -303,17 +303,17 @@ test('py3.9 can package flask with zip option', async (t) => {
   const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip');
   t.true(
     zipfiles.includes('.requirements.zip'),
-    'zipped requirements are packaged'
+    'zipped requirements are packaged',
   );
   t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged');
   t.false(
     zipfiles.includes(`flask${sep}__init__.py`),
-    "flask isn't packaged on its own"
+    "flask isn't packaged on its own",
   );
   t.end();
 });
 
-test('py3.9 can package flask with slim option', async (t) => {
+test('py3.10 can package flask with slim option', async (t) => {
   process.chdir('tests/base');
   const path = npm(['pack', '../..']);
   npm(['i', path]);
@@ -323,16 +323,16 @@ test('py3.9 can package flask with slim option', async (t) => {
   t.deepEqual(
     zipfiles.filter((filename) => filename.endsWith('.pyc')),
     [],
-    'no pyc files packaged'
+    'no pyc files packaged',
   );
   t.true(
     zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > 0,
-    '__main__.py files are packaged'
+    '__main__.py files are packaged',
   );
   t.end();
 });
 
-test('py3.9 can package flask with slim & slimPatterns options', async (t) => {
+test('py3.10 can package flask with slim & slimPatterns options', async (t) => {
   process.chdir('tests/base');
   copySync('_slimPatterns.yml', 'slimPatterns.yml');
   const path = npm(['pack', '../..']);
@@ -343,17 +343,17 @@ test('py3.9 can package flask with slim & slimPatterns options', async (t) => {
   t.deepEqual(
     zipfiles.filter((filename) => filename.endsWith('.pyc')),
     [],
-    'no pyc files packaged'
+    'no pyc files packaged',
   );
   t.deepEqual(
     zipfiles.filter((filename) => filename.endsWith('__main__.py')),
     [],
-    '__main__.py files are NOT packaged'
+    '__main__.py files are NOT packaged',
   );
   t.end();
 });
 
-test("py3.9 doesn't package bottle with noDeploy option", async (t) => {
+test("py3.10 doesn't package bottle with noDeploy option", async (t) => {
   process.chdir('tests/base');
   const path = npm(['pack', '../..']);
   npm(['i', path]);
@@ -371,7 +371,7 @@ test("py3.9 doesn't package bottle with noDeploy option", async (t) => {
   t.end();
 });
 
-test('py3.9 can package boto3 with editable', async (t) => {
+test('py3.10 can package boto3 with editable', async (t) => {
   process.chdir('tests/base');
   const path = npm(['pack', '../..']);
   npm(['i', path]);
@@ -385,13 +385,13 @@ test('py3.9 can package boto3 with editable', async (t) => {
   t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged');
   t.true(
     zipfiles.includes(`botocore${sep}__init__.py`),
-    'botocore is packaged'
+    'botocore is packaged',
   );
   t.end();
 });
 
 test(
-  'py3.9 can package flask with dockerizePip option',
+  'py3.10 can package flask with dockerizePip option',
   async (t) => {
     process.chdir('tests/base');
     const path = npm(['pack', '../..']);
@@ -402,11 +402,11 @@ test(
     t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged');
     t.end();
   },
-  { skip: !canUseDocker() || brokenOn('win32') }
+  { skip: !canUseDocker() || brokenOn('win32') },
 );
 
 test(
-  'py3.9 can package flask with slim & dockerizePip option',
+  'py3.10 can package flask with slim & dockerizePip option',
   async (t) => {
     process.chdir('tests/base');
     const path = npm(['pack', '../..']);
@@ -417,20 +417,20 @@ test(
     t.deepEqual(
       zipfiles.filter((filename) => filename.endsWith('.pyc')),
       [],
-      '*.pyc files are NOT packaged'
+      '*.pyc files are NOT packaged',
     );
     t.true(
       zipfiles.filter((filename) => filename.endsWith('__main__.py')).length >
         0,
-      '__main__.py files are packaged'
+      '__main__.py files are packaged',
     );
     t.end();
   },
-  { skip: !canUseDocker() || brokenOn('win32') }
+  { skip: !canUseDocker() || brokenOn('win32') },
 );
 
 test(
-  'py3.9 can package flask with slim & dockerizePip & slimPatterns options',
+  'py3.10 can package flask with slim & dockerizePip & slimPatterns options',
   async (t) => {
     process.chdir('tests/base');
     copySync('_slimPatterns.yml', 'slimPatterns.yml');
@@ -442,20 +442,20 @@ test(
     t.deepEqual(
       zipfiles.filter((filename) => filename.endsWith('.pyc')),
       [],
-      '*.pyc files are packaged'
+      '*.pyc files are NOT packaged',
     );
     t.deepEqual(
       zipfiles.filter((filename) => filename.endsWith('__main__.py')),
       [],
-      '__main__.py files are NOT packaged'
+      '__main__.py files are NOT packaged',
     );
     t.end();
   },
-  { skip: !canUseDocker() || brokenOn('win32') }
+  { skip: !canUseDocker() || brokenOn('win32') },
 );
 
 test(
-  'py3.9 can package flask with zip & dockerizePip option',
+  'py3.10 can package flask with zip & dockerizePip option',
   async (t) => {
     process.chdir('tests/base');
     const path = npm(['pack', '../..']);
@@ -463,31 +463,31 @@ test(
     sls(['package'], { env: { dockerizePip: 'true', zip: 'true' } });
     const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip');
     const zippedReqs = await listRequirementsZipFiles(
-      '.serverless/sls-py-req-test.zip'
+      '.serverless/sls-py-req-test.zip',
     );
     t.true(
       zipfiles.includes('.requirements.zip'),
-      'zipped requirements are packaged'
+      'zipped requirements are packaged',
     );
     t.true(
       zipfiles.includes(`unzip_requirements.py`),
-      'unzip util is packaged'
+      'unzip util is packaged',
     );
     t.false(
       zipfiles.includes(`flask${sep}__init__.py`),
-      "flask isn't packaged on its own"
+      "flask isn't packaged on its own",
     );
     t.true(
       zippedReqs.includes(`flask/__init__.py`),
-      'flask is packaged in the .requirements.zip file'
+      'flask is packaged in the .requirements.zip file',
     );
     t.end();
   },
-  { skip: !canUseDocker() || brokenOn('win32') }
+  { skip: !canUseDocker() || brokenOn('win32') },
 );
 
 test(
-  'py3.9 can package flask with zip & slim & dockerizePip option',
+  'py3.10 can package flask with zip & slim & dockerizePip option',
   async (t) => {
     process.chdir('tests/base');
     const path = npm(['pack', '../..']);
@@ -497,30 +497,30 @@ test(
     });
     const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip');
     const zippedReqs = await listRequirementsZipFiles(
-      '.serverless/sls-py-req-test.zip'
+      '.serverless/sls-py-req-test.zip',
     );
     t.true(
       zipfiles.includes('.requirements.zip'),
-      'zipped requirements are packaged'
+      'zipped requirements are packaged',
     );
     t.true(
       zipfiles.includes(`unzip_requirements.py`),
-      'unzip util is packaged'
+      'unzip util is packaged',
     );
     t.false(
       zipfiles.includes(`flask${sep}__init__.py`),
-      "flask isn't packaged on its own"
+      "flask isn't packaged on its own",
     );
     t.true(
       zippedReqs.includes(`flask/__init__.py`),
-      'flask is packaged in the .requirements.zip file'
+      'flask is packaged in the .requirements.zip file',
     );
     t.end();
   },
-  { skip: !canUseDocker() || brokenOn('win32') }
+  { skip: !canUseDocker() || brokenOn('win32') },
 );
 
-test('pipenv py3.9 can package flask with default options', async (t) => {
+test('pipenv py3.10 can package flask with default options', async (t) => {
   process.chdir('tests/pipenv');
   const path = npm(['pack', '../..']);
   npm(['i', path]);
@@ -530,12 +530,12 @@ test('pipenv py3.9 can package flask with default options', async (t) => {
   t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged');
   t.false(
     zipfiles.includes(`pytest${sep}__init__.py`),
-    'dev-package pytest is NOT packaged'
+    'dev-package pytest is NOT packaged',
   );
   t.end();
 });
 
-test('pipenv py3.9 can package flask with slim option', async (t) => {
+test('pipenv py3.10 can package flask with slim option', async (t) => {
   process.chdir('tests/pipenv');
   const path = npm(['pack', '../..']);
   npm(['i', path]);
@@ -545,16 +545,16 @@ test('pipenv py3.9 can package flask with slim option', async (t) => {
   t.deepEqual(
     zipfiles.filter((filename) => filename.endsWith('.pyc')),
     [],
-    'no pyc files packaged'
+    'no pyc files packaged',
   );
   t.true(
     zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > 0,
-    '__main__.py files are packaged'
+    '__main__.py files are packaged',
   );
   t.end();
 });
 
-test('pipenv py3.9 can package flask with slim & slimPatterns options', async (t) => {
+test('pipenv py3.10 can package flask with slim & slimPatterns options', async (t) => {
   process.chdir('tests/pipenv');
 
   copySync('_slimPatterns.yml', 'slimPatterns.yml');
@@ -566,17 +566,17 @@ test('pipenv py3.9 can package flask with slim & slimPatterns options', async (t
   t.deepEqual(
     zipfiles.filter((filename) => filename.endsWith('.pyc')),
     [],
-    'no pyc files packaged'
+    'no pyc files packaged',
   );
   t.deepEqual(
     zipfiles.filter((filename) => filename.endsWith('__main__.py')),
     [],
-    '__main__.py files are NOT packaged'
+    '__main__.py files are NOT packaged',
   );
   t.end();
 });
 
-test('pipenv py3.9 can package flask with zip option', async (t) => {
+test('pipenv py3.10 can package flask with zip option', async (t) => {
   process.chdir('tests/pipenv');
   const path = npm(['pack', '../..']);
   npm(['i', path]);
@@ -584,17 +584,17 @@ test('pipenv py3.9 can package flask with zip option', async (t) => {
   const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip');
   t.true(
     zipfiles.includes('.requirements.zip'),
-    'zipped requirements are packaged'
+    'zipped requirements are packaged',
   );
   t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged');
   t.false(
     zipfiles.includes(`flask${sep}__init__.py`),
-    "flask isn't packaged on its own"
+    "flask isn't packaged on its own",
   );
   t.end();
 });
 
-test("pipenv py3.9 doesn't package bottle with noDeploy option", async (t) => {
+test("pipenv py3.10 doesn't package bottle with noDeploy option", async (t) => {
   process.chdir('tests/pipenv');
   const path = npm(['pack', '../..']);
   npm(['i', path]);
@@ -633,7 +633,7 @@ test('non poetry pyproject.toml without requirements.txt packages handler only',
   t.end();
 });
 
-test('poetry py3.9 can package flask with default options', async (t) => {
+test('poetry py3.10 can package flask with default options', async (t) => {
   process.chdir('tests/poetry');
   const path = npm(['pack', '../..']);
   npm(['i', path]);
@@ -645,7 +645,7 @@ test('poetry py3.9 can package flask with default options', async (t) => {
   t.end();
 });
 
-test('poetry py3.9 can package flask with slim option', async (t) => {
+test('poetry py3.10 can package flask with slim option', async (t) => {
   process.chdir('tests/poetry');
   const path = npm(['pack', '../..']);
   npm(['i', path]);
@@ -655,16 +655,16 @@ test('poetry py3.9 can package flask with slim option', async (t) => {
   t.deepEqual(
     zipfiles.filter((filename) => filename.endsWith('.pyc')),
     [],
-    'no pyc files packaged'
+    'no pyc files packaged',
   );
   t.true(
     zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > 0,
-    '__main__.py files are packaged'
+    '__main__.py files are packaged',
   );
   t.end();
 });
 
-test('poetry py3.9 can package flask with slim & slimPatterns options', async (t) => {
+test('poetry py3.10 can package flask with slim & slimPatterns options', async (t) => {
   process.chdir('tests/poetry');
 
   copySync('_slimPatterns.yml', 'slimPatterns.yml');
@@ -676,17 +676,17 @@ test('poetry py3.9 can package flask with slim & slimPatterns options', async (t
   t.deepEqual(
     zipfiles.filter((filename) => filename.endsWith('.pyc')),
     [],
-    'no pyc files packaged'
+    'no pyc files packaged',
   );
   t.deepEqual(
     zipfiles.filter((filename) => filename.endsWith('__main__.py')),
     [],
-    '__main__.py files are NOT packaged'
+    '__main__.py files are NOT packaged',
   );
   t.end();
 });
 
-test('poetry py3.9 can package flask with zip option', async (t) => {
+test('poetry py3.10 can package flask with zip option', async (t) => {
   process.chdir('tests/poetry');
   const path = npm(['pack', '../..']);
   npm(['i', path]);
@@ -694,17 +694,17 @@ test('poetry py3.9 can package flask with zip option', async (t) => {
   const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip');
   t.true(
     zipfiles.includes('.requirements.zip'),
-    'zipped requirements are packaged'
+    'zipped requirements are packaged',
   );
   t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged');
   t.false(
     zipfiles.includes(`flask${sep}__init__.py`),
-    "flask isn't packaged on its own"
+    "flask isn't packaged on its own",
   );
   t.end();
 });
 
-test("poetry py3.9 doesn't package bottle with noDeploy option", async (t) => {
+test("poetry py3.10 doesn't package bottle with noDeploy option", async (t) => {
   process.chdir('tests/poetry');
   const path = npm(['pack', '../..']);
   npm(['i', path]);
@@ -722,7 +722,7 @@ test("poetry py3.9 doesn't package bottle with noDeploy option", async (t) => {
   t.end();
 });
 
-test('py3.9 can package flask with zip option and no explicit include', async (t) => {
+test('py3.10 can package flask with zip option and no explicit include', async (t) => {
   process.chdir('tests/base');
   const path = npm(['pack', '../..']);
   npm(['i', path]);
@@ -732,17 +732,17 @@ test('py3.9 can package flask with zip option and no explicit include', async (t
   const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip');
   t.true(
     zipfiles.includes('.requirements.zip'),
-    'zipped requirements are packaged'
+    'zipped requirements are packaged',
   );
   t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged');
   t.false(
     zipfiles.includes(`flask${sep}__init__.py`),
-    "flask isn't packaged on its own"
+    "flask isn't packaged on its own",
   );
   t.end();
 });
 
-test('py3.9 can package lambda-decorators using vendor option', async (t) => {
+test('py3.10 can package lambda-decorators using vendor option', async (t) => {
   process.chdir('tests/base');
   const path = npm(['pack', '../..']);
   npm(['i', path]);
@@ -752,7 +752,7 @@ test('py3.9 can package lambda-decorators using vendor option', async (t) => {
   t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged');
   t.true(
     zipfiles.includes(`lambda_decorators.py`),
-    'lambda_decorators.py is packaged'
+    'lambda_decorators.py is packaged',
   );
   t.end();
 });
@@ -780,32 +780,32 @@ test(
     t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged');
     t.true(
       zipfiles.includes(`lambda_decorators.py`),
-      'lambda_decorators.py is packaged'
+      'lambda_decorators.py is packaged',
     );
     t.true(zipfiles.includes(`foobar`), 'foobar is packaged');
 
     const zipfiles_with_metadata = await listZipFilesWithMetaData(
-      '.serverless/sls-py-req-test.zip'
+      '.serverless/sls-py-req-test.zip',
     );
     t.true(
       zipfiles_with_metadata['foobar'].unixPermissions
         .toString(8)
         .slice(3, 6) === perm,
-      'foobar has retained its executable file permissions'
+      'foobar has retained its executable file permissions',
     );
 
     const flaskPerm = statSync('.serverless/requirements/bin/flask').mode;
     t.true(
       zipfiles_with_metadata['bin/flask'].unixPermissions === flaskPerm,
-      'bin/flask has retained its executable file permissions'
+      'bin/flask has retained its executable file permissions',
     );
 
     t.end();
   },
-  { skip: process.platform === 'win32' }
+  { skip: process.platform === 'win32' },
 );
 
-test('py3.9 can package flask in a project with a space in it', async (t) => {
+test('py3.10 can package flask in a project with a space in it', async (t) => {
   copySync('tests/base', 'tests/base with a space');
   process.chdir('tests/base with a space');
   const path = npm(['pack', '../..']);
@@ -818,7 +818,7 @@ test('py3.9 can package flask in a project with a space in it', async (t) => {
 });
 
 test(
-  'py3.9 can package flask in a project with a space in it with docker',
+  'py3.10 can package flask in a project with a space in it with docker',
   async (t) => {
     copySync('tests/base', 'tests/base with a space');
     process.chdir('tests/base with a space');
@@ -830,10 +830,10 @@ test(
     t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged');
     t.end();
   },
-  { skip: !canUseDocker() || brokenOn('win32') }
+  { skip: !canUseDocker() || brokenOn('win32') },
 );
 
-test('py3.9 supports custom file name with fileName option', async (t) => {
+test('py3.10 supports custom file name with fileName option', async (t) => {
   process.chdir('tests/base');
   const path = npm(['pack', '../..']);
   writeFileSync('puck', 'requests');
@@ -842,14 +842,14 @@ test('py3.9 supports custom file name with fileName option', async (t) => {
   const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip');
   t.true(
     zipfiles.includes(`requests${sep}__init__.py`),
-    'requests is packaged'
+    'requests is packaged',
   );
   t.false(zipfiles.includes(`flask${sep}__init__.py`), 'flask is NOT packaged');
   t.false(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is NOT packaged');
   t.end();
 });
 
-test("py3.9 doesn't package bottle with zip option", async (t) => {
+test("py3.10 doesn't package bottle with zip option", async (t) => {
   process.chdir('tests/base');
   const path = npm(['pack', '../..']);
   npm(['i', path]);
@@ -863,29 +863,29 @@ test("py3.9 doesn't package bottle with zip option", async (t) => {
   sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } });
   const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip');
   const zippedReqs = await listRequirementsZipFiles(
-    '.serverless/sls-py-req-test.zip'
+    '.serverless/sls-py-req-test.zip',
   );
   t.true(
     zipfiles.includes('.requirements.zip'),
-    'zipped requirements are packaged'
+    'zipped requirements are packaged',
   );
   t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged');
   t.false(
     zipfiles.includes(`flask${sep}__init__.py`),
-    "flask isn't packaged on its own"
+    "flask isn't packaged on its own",
   );
   t.true(
     zippedReqs.includes(`flask/__init__.py`),
-    'flask is packaged in the .requirements.zip file'
+    'flask is packaged in the .requirements.zip file',
   );
   t.false(
     zippedReqs.includes(`bottle.py`),
-    'bottle is NOT packaged in the .requirements.zip file'
+    'bottle is NOT packaged in the .requirements.zip file',
   );
   t.end();
 });
 
-test('py3.9 can package flask with slim, slimPatterns & slimPatternsAppendDefaults=false options', async (t) => {
+test('py3.10 can package flask with slim, slimPatterns & slimPatternsAppendDefaults=false options', async (t) => {
   process.chdir('tests/base');
   copySync('_slimPatterns.yml', 'slimPatterns.yml');
   const path = npm(['pack', '../..']);
@@ -897,18 +897,18 @@ test('py3.9 can package flask with slim, slimPatterns & slimPatternsAppendDefaul
   t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged');
   t.true(
     zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1,
-    'pyc files are packaged'
+    'pyc files are packaged',
   );
   t.deepEqual(
     zipfiles.filter((filename) => filename.endsWith('__main__.py')),
     [],
-    '__main__.py files are NOT packaged'
+    '__main__.py files are NOT packaged',
   );
   t.end();
 });
 
 test(
-  'py3.9 can package flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options',
+  'py3.10 can package flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options',
   async (t) => {
     process.chdir('tests/base');
     copySync('_slimPatterns.yml', 'slimPatterns.yml');
@@ -925,19 +925,19 @@ test(
     t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged');
     t.true(
       zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1,
-      'pyc files are packaged'
+      'pyc files are packaged',
     );
     t.deepEqual(
       zipfiles.filter((filename) => filename.endsWith('__main__.py')),
       [],
-      '__main__.py files are NOT packaged'
+      '__main__.py files are NOT packaged',
     );
     t.end();
   },
-  { skip: !canUseDocker() || brokenOn('win32') }
+  { skip: !canUseDocker() || brokenOn('win32') },
 );
 
-test('pipenv py3.9 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false  option', async (t) => {
+test('pipenv py3.10 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', async (t) => {
   process.chdir('tests/pipenv');
   copySync('_slimPatterns.yml', 'slimPatterns.yml');
   const path = npm(['pack', '../..']);
@@ -950,17 +950,17 @@ test('pipenv py3.9 can package flask with slim & slimPatterns & slimPatternsAppe
   t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged');
   t.true(
     zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1,
-    'pyc files are packaged'
+    'pyc files are packaged',
   );
   t.deepEqual(
     zipfiles.filter((filename) => filename.endsWith('__main__.py')),
     [],
-    '__main__.py files are NOT packaged'
+    '__main__.py files are NOT packaged',
   );
   t.end();
 });
 
-test('poetry py3.9 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false  option', async (t) => {
+test('poetry py3.10 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', async (t) => {
   process.chdir('tests/poetry');
   copySync('_slimPatterns.yml', 'slimPatterns.yml');
   const path = npm(['pack', '../..']);
@@ -973,24 +973,24 @@ test('poetry py3.9 can package flask with slim & slimPatterns & slimPatternsAppe
   t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged');
   t.true(
     zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1,
-    'pyc files are packaged'
+    'pyc files are packaged',
   );
   t.deepEqual(
     zipfiles.filter((filename) => filename.endsWith('__main__.py')),
     [],
-    '__main__.py files are NOT packaged'
+    '__main__.py files are NOT packaged',
   );
   t.end();
 });
 
-test('poetry py3.9 can package flask with package individually option', async (t) => {
+test('poetry py3.10 can package flask with package individually option', async (t) => {
   process.chdir('tests/poetry_individually');
   const path = npm(['pack', '../..']);
   npm(['i', path]);
 
   sls(['package'], { env: {} });
   const zipfiles = await listZipFiles(
-    '.serverless/module1-sls-py-req-test-dev-hello.zip'
+    '.serverless/module1-sls-py-req-test-dev-hello.zip',
   );
   t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged');
   t.true(zipfiles.includes(`bottle.py`), 'bottle is packaged');
@@ -998,7 +998,7 @@ test('poetry py3.9 can package flask with package individually option', async (t
   t.end();
 });
 
-test('py3.9 can package flask with package individually option', async (t) => {
+test('py3.10 can package flask with package individually option', async (t) => {
   process.chdir('tests/base');
   const path = npm(['pack', '../..']);
   npm(['i', path]);
@@ -1006,81 +1006,81 @@ test('py3.9 can package flask with package individually option', async (t) => {
   const zipfiles_hello = await listZipFiles('.serverless/hello.zip');
   t.false(
     zipfiles_hello.includes(`fn2${sep}__init__.py`),
-    'fn2 is NOT packaged in function hello'
+    'fn2 is NOT packaged in function hello',
   );
   t.true(
     zipfiles_hello.includes('handler.py'),
-    'handler.py is packaged in function hello'
+    'handler.py is packaged in function hello',
   );
   t.false(
     zipfiles_hello.includes(`dataclasses.py`),
-    'dataclasses is NOT packaged in function hello'
+    'dataclasses is NOT packaged in function hello',
   );
   t.true(
     zipfiles_hello.includes(`flask${sep}__init__.py`),
-    'flask is packaged in function hello'
+    'flask is packaged in function hello',
   );
 
   const zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip');
   t.false(
     zipfiles_hello2.includes(`fn2${sep}__init__.py`),
-    'fn2 is NOT packaged in function hello2'
+    'fn2 is NOT packaged in function hello2',
   );
   t.true(
     zipfiles_hello2.includes('handler.py'),
-    'handler.py is packaged in function hello2'
+    'handler.py is packaged in function hello2',
   );
   t.false(
     zipfiles_hello2.includes(`dataclasses.py`),
-    'dataclasses is NOT packaged in function hello2'
+    'dataclasses is NOT packaged in function hello2',
   );
   t.true(
     zipfiles_hello2.includes(`flask${sep}__init__.py`),
-    'flask is packaged in function hello2'
+    'flask is packaged in function hello2',
   );
 
   const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip');
   t.false(
     zipfiles_hello3.includes(`fn2${sep}__init__.py`),
-    'fn2 is NOT packaged in function hello3'
+    'fn2 is NOT packaged in function hello3',
   );
   t.true(
     zipfiles_hello3.includes('handler.py'),
-    'handler.py is packaged in function hello3'
+    'handler.py is packaged in function hello3',
   );
   t.false(
     zipfiles_hello3.includes(`dataclasses.py`),
-    'dataclasses is NOT packaged in function hello3'
+    'dataclasses is NOT packaged in function hello3',
   );
   t.false(
     zipfiles_hello3.includes(`flask${sep}__init__.py`),
-    'flask is NOT packaged in function hello3'
+    'flask is NOT packaged in function hello3',
   );
 
   const zipfiles_hello4 = await listZipFiles(
-    '.serverless/fn2-sls-py-req-test-dev-hello4.zip'
+    '.serverless/fn2-sls-py-req-test-dev-hello4.zip',
   );
   t.false(
     zipfiles_hello4.includes(`fn2${sep}__init__.py`),
-    'fn2 is NOT packaged in function hello4'
+    'fn2 is NOT packaged in function hello4',
   );
   t.true(
     zipfiles_hello4.includes('fn2_handler.py'),
-    'fn2_handler is packaged in the zip-root in function hello4'
+    'fn2_handler is packaged in the zip-root in function hello4',
   );
   t.true(
     zipfiles_hello4.includes(`dataclasses.py`),
-    'dataclasses is packaged in function hello4'
+    'dataclasses is packaged in function hello4',
   );
   t.false(
     zipfiles_hello4.includes(`flask${sep}__init__.py`),
-    'flask is NOT packaged in function hello4'
+    'flask is NOT packaged in function hello4',
   );
 
   t.end();
 });
 
-test('py3.9 can package flask with package individually & slim option', async (t) => {
+test('py3.10 can package flask with package individually & slim option', async (t) => {
   process.chdir('tests/base');
   const path = npm(['pack', '../..']);
   npm(['i', path]);
@@ -1088,137 +1088,137 @@ test('py3.9 can package flask with package individually & slim option', async (t
   const zipfiles_hello = await listZipFiles('.serverless/hello.zip');
   t.true(
     zipfiles_hello.includes('handler.py'),
-    'handler.py is packaged in function hello'
+    'handler.py is packaged in function hello',
   );
   t.deepEqual(
     zipfiles_hello.filter((filename) => filename.endsWith('.pyc')),
     [],
-    'no pyc files packaged in function hello'
+    'no pyc files packaged in function hello',
   );
   t.true(
     zipfiles_hello.includes(`flask${sep}__init__.py`),
-    'flask is packaged in function hello'
+    'flask is packaged in function hello',
   );
   t.false(
     zipfiles_hello.includes(`dataclasses.py`),
-    'dataclasses is NOT packaged in function hello'
+    'dataclasses is NOT packaged in function hello',
   );
 
   const zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip');
   t.true(
     zipfiles_hello2.includes('handler.py'),
-    'handler.py is packaged in function hello2'
+    'handler.py is packaged in function hello2',
   );
   t.deepEqual(
     zipfiles_hello2.filter((filename) => filename.endsWith('.pyc')),
     [],
-    'no pyc files packaged in function hello2'
+    'no pyc files packaged in function hello2',
   );
   t.true(
     zipfiles_hello2.includes(`flask${sep}__init__.py`),
-    'flask is packaged in function hello2'
+    'flask is packaged in function hello2',
   );
   t.false(
     zipfiles_hello2.includes(`dataclasses.py`),
-    'dataclasses is NOT packaged in function hello2'
+    'dataclasses is NOT packaged in function hello2',
   );
 
   const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip');
   t.true(
     zipfiles_hello3.includes('handler.py'),
-    'handler.py is packaged in function hello3'
+    'handler.py is packaged in function hello3',
   );
   t.deepEqual(
     zipfiles_hello3.filter((filename) => filename.endsWith('.pyc')),
     [],
-    'no pyc files packaged in function hello3'
+    'no pyc files packaged in function hello3',
   );
   t.false(
     zipfiles_hello3.includes(`flask${sep}__init__.py`),
-    'flask is NOT packaged in function hello3'
+    'flask is NOT packaged in function hello3',
   );
 
   const zipfiles_hello4 = await listZipFiles(
-    '.serverless/fn2-sls-py-req-test-dev-hello4.zip'
+    '.serverless/fn2-sls-py-req-test-dev-hello4.zip',
   );
   t.true(
     zipfiles_hello4.includes('fn2_handler.py'),
-    'fn2_handler is packaged in the zip-root in function hello4'
+    'fn2_handler is packaged in the zip-root in function hello4',
   );
   t.true(
     zipfiles_hello4.includes(`dataclasses.py`),
-    'dataclasses is packaged in function hello4'
+    'dataclasses is packaged in function hello4',
   );
   t.false(
     zipfiles_hello4.includes(`flask${sep}__init__.py`),
-    'flask is NOT packaged in function hello4'
+    'flask is NOT packaged in function hello4',
   );
   t.deepEqual(
     zipfiles_hello4.filter((filename) => filename.endsWith('.pyc')),
     [],
-    'no pyc files packaged in function hello4'
+    'no pyc files packaged in function hello4',
   );
 
   t.end();
 });
 
-test('py3.9 can package only requirements of module', async (t) => {
+test('py3.10 can package only requirements of module', async (t) => {
   process.chdir('tests/individually');
   const path = npm(['pack', '../..']);
   npm(['i', path]);
   sls(['package'], { env: {} });
   const zipfiles_hello = await listZipFiles(
-    '.serverless/module1-sls-py-req-test-indiv-dev-hello1.zip'
+    '.serverless/module1-sls-py-req-test-indiv-dev-hello1.zip',
   );
   t.true(
     zipfiles_hello.includes('handler1.py'),
-    'handler1.py is packaged at root level in function hello1'
+    'handler1.py is packaged at root level in function hello1',
   );
   t.false(
     zipfiles_hello.includes('handler2.py'),
-    'handler2.py is NOT packaged at root level in function hello1'
+    'handler2.py is NOT packaged at root level in function hello1',
   );
   t.true(
     zipfiles_hello.includes(`pyaml${sep}__init__.py`),
-    'pyaml is packaged in function hello1'
+    'pyaml is packaged in function hello1',
   );
   t.true(
     zipfiles_hello.includes(`boto3${sep}__init__.py`),
-    'boto3 is packaged in function hello1'
+    'boto3 is packaged in function hello1',
   );
   t.false(
     zipfiles_hello.includes(`flask${sep}__init__.py`),
-    'flask is NOT packaged in function hello1'
+    'flask is NOT packaged in function hello1',
   );
 
   const zipfiles_hello2 = await listZipFiles(
-    '.serverless/module2-sls-py-req-test-indiv-dev-hello2.zip'
+    '.serverless/module2-sls-py-req-test-indiv-dev-hello2.zip',
   );
   t.true(
     zipfiles_hello2.includes('handler2.py'),
-    'handler2.py is packaged at root level in function hello2'
+    'handler2.py is packaged at root level in function hello2',
   );
   t.false(
     zipfiles_hello2.includes('handler1.py'),
-    'handler1.py is NOT packaged at root level in function hello2'
+    'handler1.py is NOT packaged at root level in function hello2',
   );
   t.false(
     zipfiles_hello2.includes(`pyaml${sep}__init__.py`),
-    'pyaml is NOT packaged in function hello2'
+    'pyaml is NOT packaged in function hello2',
   );
   t.false(
     zipfiles_hello2.includes(`boto3${sep}__init__.py`),
-    'boto3 is NOT packaged in function hello2'
+    'boto3 is NOT packaged in function hello2',
   );
   t.true(
     zipfiles_hello2.includes(`flask${sep}__init__.py`),
-    'flask is packaged in function hello2'
+    'flask is packaged in function hello2',
   );
 
   t.end();
 });
 
-test('py3.9 can package lambda-decorators using vendor and invidiually option', async (t) => {
+test('py3.10 can package lambda-decorators using vendor and individually option', async (t) => {
   process.chdir('tests/base');
   const path = npm(['pack', '../..']);
   npm(['i', path]);
@@ -1226,71 +1226,71 @@ test('py3.9 can package lambda-decorators using vendor and invidiually option',
   const zipfiles_hello = await listZipFiles('.serverless/hello.zip');
   t.true(
     zipfiles_hello.includes('handler.py'),
-    'handler.py is packaged at root level in function hello'
+    'handler.py is packaged at root level in function hello',
   );
   t.true(
     zipfiles_hello.includes(`flask${sep}__init__.py`),
-    'flask is packaged in function hello'
+    'flask is packaged in function hello',
   );
   t.true(
     zipfiles_hello.includes(`lambda_decorators.py`),
-    'lambda_decorators.py is packaged in function hello'
+    'lambda_decorators.py is packaged in function hello',
   );
   t.false(
     zipfiles_hello.includes(`dataclasses.py`),
-    'dataclasses is NOT packaged in function hello'
+    'dataclasses is NOT packaged in function hello',
   );
 
   const zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip');
   t.true(
     zipfiles_hello2.includes('handler.py'),
-    'handler.py is packaged at root level in function hello2'
+    'handler.py is packaged at root level in function hello2',
   );
   t.true(
     zipfiles_hello2.includes(`flask${sep}__init__.py`),
-    'flask is packaged in function hello2'
+    'flask is packaged in function hello2',
   );
   t.true(
     zipfiles_hello2.includes(`lambda_decorators.py`),
-    'lambda_decorators.py is packaged in function hello2'
+    'lambda_decorators.py is packaged in function hello2',
   );
   t.false(
     zipfiles_hello2.includes(`dataclasses.py`),
-    'dataclasses is NOT packaged in function hello2'
+    'dataclasses is NOT packaged in function hello2',
   );
 
   const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip');
   t.true(
     zipfiles_hello3.includes('handler.py'),
-    'handler.py is packaged at root level in function hello3'
+    'handler.py is packaged at root level in function hello3',
   );
   t.false(
     zipfiles_hello3.includes(`flask${sep}__init__.py`),
-    'flask is NOT packaged in function hello3'
+    'flask is NOT packaged in function hello3',
   );
   t.false(
     zipfiles_hello3.includes(`lambda_decorators.py`),
-    'lambda_decorators.py is NOT packaged in function hello3'
+    'lambda_decorators.py is NOT packaged in function hello3',
   );
   t.false(
     zipfiles_hello3.includes(`dataclasses.py`),
-    'dataclasses is NOT packaged in function hello3'
+    'dataclasses is NOT packaged in function hello3',
   );
 
   const zipfiles_hello4 = await listZipFiles(
-    '.serverless/fn2-sls-py-req-test-dev-hello4.zip'
+    '.serverless/fn2-sls-py-req-test-dev-hello4.zip',
   );
   t.true(
     zipfiles_hello4.includes('fn2_handler.py'),
-    'fn2_handler is packaged in the zip-root in function hello4'
+    'fn2_handler is packaged in the zip-root in function hello4',
   );
   t.true(
     zipfiles_hello4.includes(`dataclasses.py`),
-    'dataclasses is packaged in function hello4'
+    'dataclasses is packaged in function hello4',
   );
   t.false(
     zipfiles_hello4.includes(`flask${sep}__init__.py`),
-    'flask is NOT packaged in function hello4'
+    'flask is NOT packaged in function hello4',
   );
   t.end();
 });
@@ -1307,31 +1307,31 @@ test(
     npm(['i', path]);
     sls(['package'], { env: {} });
     const zipfiles_hello1 = await listZipFilesWithMetaData(
-      '.serverless/hello1.zip'
+      '.serverless/hello1.zip',
     );
 
     t.true(
       zipfiles_hello1['module1/foobar'].unixPermissions
         .toString(8)
         .slice(3, 6) === perm,
-      'foobar has retained its executable file permissions'
+      'foobar has retained its executable file permissions',
     );
 
     const zipfiles_hello2 = await listZipFilesWithMetaData(
-      '.serverless/module2-sls-py-req-test-indiv-dev-hello2.zip'
+      '.serverless/module2-sls-py-req-test-indiv-dev-hello2.zip',
     );
     const flaskPerm = statSync(
-      '.serverless/module2/requirements/bin/flask'
+      '.serverless/module2/requirements/bin/flask',
     ).mode;
 
     t.true(
       zipfiles_hello2['bin/flask'].unixPermissions === flaskPerm,
-      'bin/flask has retained its executable file permissions'
+      'bin/flask has retained its executable file permissions',
     );
 
     t.end();
   },
-  { skip: process.platform === 'win32' }
+  { skip: process.platform === 'win32' },
 );
 
 test(
@@ -1346,35 +1346,35 @@ test(
     npm(['i', path]);
     sls(['package'], { env: { dockerizePip: 'true' } });
     const zipfiles_hello = await listZipFilesWithMetaData(
-      '.serverless/hello1.zip'
+      '.serverless/hello1.zip',
     );
 
     t.true(
       zipfiles_hello['module1/foobar'].unixPermissions
         .toString(8)
         .slice(3, 6) === perm,
-      'foobar has retained its executable file permissions'
+      'foobar has retained its executable file permissions',
     );
 
     const zipfiles_hello2 = await listZipFilesWithMetaData(
-      '.serverless/module2-sls-py-req-test-indiv-dev-hello2.zip'
+      '.serverless/module2-sls-py-req-test-indiv-dev-hello2.zip',
     );
     const flaskPerm = statSync(
-      '.serverless/module2/requirements/bin/flask'
+      '.serverless/module2/requirements/bin/flask',
     ).mode;
 
     t.true(
       zipfiles_hello2['bin/flask'].unixPermissions === flaskPerm,
-      'bin/flask has retained its executable file permissions'
+      'bin/flask has retained its executable file permissions',
     );
 
     t.end();
   },
-  { skip: !canUseDocker() || process.platform === 'win32' }
+  { skip: !canUseDocker() || process.platform === 'win32' },
 );
 
 test(
-  'py3.9 can package flask running in docker with module runtime & architecture of function',
+  'py3.10 can package flask running in docker with module runtime & architecture of function',
   async (t) => {
     process.chdir('tests/individually_mixed_runtime');
     const path = npm(['pack', '../..']);
@@ -1385,24 +1385,24 @@ test(
     });
 
     const zipfiles_hello2 = await listZipFiles(
-      '.serverless/module2-sls-py-req-test-indiv-mixed-runtime-dev-hello2.zip'
+      '.serverless/module2-sls-py-req-test-indiv-mixed-runtime-dev-hello2.zip',
     );
     t.true(
       zipfiles_hello2.includes('handler2.py'),
-      'handler2.py is packaged at root level in function hello2'
+      'handler2.py is packaged at root level in function hello2',
     );
     t.true(
       zipfiles_hello2.includes(`flask${sep}__init__.py`),
-      'flask is packaged in function hello2'
+      'flask is packaged in function hello2',
     );
   },
   {
     skip: !canUseDocker() || process.platform === 'win32',
-  }
+  },
 );
 
 test(
-  'py3.9 can package flask succesfully when using mixed architecture, docker and zipping',
+  'py3.10 can package flask successfully when using mixed architecture, docker and zipping',
   async (t) => {
     process.chdir('tests/individually_mixed_runtime');
     const path = npm(['pack', '../..']);
@@ -1413,51 +1413,134 @@ test(
     const zipfiles_hello = await listZipFiles('.serverless/hello1.zip');
     t.true(
       zipfiles_hello.includes(`module1${sep}handler1.ts`),
-      'handler1.ts is packaged in module dir for hello1'
+      'handler1.ts is packaged in module dir for hello1',
     );
     t.false(
       zipfiles_hello.includes('handler2.py'),
-      'handler2.py is NOT packaged at root level in function hello1'
+      'handler2.py is NOT packaged at root level in function hello1',
     );
     t.false(
       zipfiles_hello.includes(`flask${sep}__init__.py`),
-      'flask is NOT packaged in function hello1'
+      'flask is NOT packaged in function hello1',
     );
 
     const zipfiles_hello2 = await listZipFiles(
-      '.serverless/module2-sls-py-req-test-indiv-mixed-runtime-dev-hello2.zip'
+      '.serverless/module2-sls-py-req-test-indiv-mixed-runtime-dev-hello2.zip',
     );
     const zippedReqs = await listRequirementsZipFiles(
-      '.serverless/module2-sls-py-req-test-indiv-mixed-runtime-dev-hello2.zip'
+      '.serverless/module2-sls-py-req-test-indiv-mixed-runtime-dev-hello2.zip',
     );
     t.true(
       zipfiles_hello2.includes('handler2.py'),
-      'handler2.py is packaged at root level in function hello2'
+      'handler2.py is packaged at root level in function hello2',
     );
     t.false(
       zipfiles_hello2.includes(`module1${sep}handler1.ts`),
-      'handler1.ts is NOT included at module1 level in hello2'
+      'handler1.ts is NOT included at module1 level in hello2',
     );
     t.false(
       zipfiles_hello2.includes(`pyaml${sep}__init__.py`),
-      'pyaml is NOT packaged in function hello2'
+      'pyaml is NOT packaged in function hello2',
     );
     t.false(
       zipfiles_hello2.includes(`boto3${sep}__init__.py`),
-      'boto3 is NOT included in zipfile'
+      'boto3 is NOT included in zipfile',
     );
     t.true(
       zippedReqs.includes(`flask${sep}__init__.py`),
-      'flask is packaged in function hello2 in requirements.zip'
+      'flask is packaged in function hello2 in requirements.zip',
     );
 
     t.end();
   },
-  { skip: !canUseDocker() || process.platform === 'win32' }
+  { skip: !canUseDocker() || process.platform === 'win32' },
 );
 
 test(
-  'py3.9 uses download cache by default option',
+  'py3.10 uses download cache by default option',
   async (t) => {
     process.chdir('tests/base');
     const path = npm(['pack', '../..']);
@@ -1466,15 +1549,15 @@ test(
     const cachepath = getUserCachePath();
     t.true(
       pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`),
-      'cache directory exists'
+      'cache directory exists',
     );
     t.end();
   },
-  { skip: true }
+  { skip: true },
 );
 
 test(
-  'py3.9 uses download cache by default',
+  'py3.10 uses download cache by default',
   async (t) => {
     process.chdir('tests/base');
     const path = npm(['pack', '../..']);
@@ -1482,15 +1565,15 @@ test(
     sls(['package'], { env: { cacheLocation: '.requirements-cache' } });
     t.true(
       pathExistsSync(`.requirements-cache${sep}downloadCacheslspyc${sep}http`),
-      'cache directory exists'
+      'cache directory exists',
     );
     t.end();
   },
-  { skip: true }
+  { skip: true },
 );
 
 test(
-  'py3.9 uses download cache with dockerizePip option',
+  'py3.10 uses download cache with dockerizePip option',
   async (t) => {
     process.chdir('tests/base');
     const path = npm(['pack', '../..']);
@@ -1499,16 +1582,16 @@ test(
     const cachepath = getUserCachePath();
     t.true(
       pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`),
-      'cache directory exists'
+      'cache directory exists',
     );
     t.end();
   },
   // { skip: !canUseDocker() || brokenOn('win32') }
-  { skip: true }
+  { skip: true },
 );
 
 test(
-  'py3.9 uses download cache with dockerizePip by default option',
+  'py3.10 uses download cache with dockerizePip by default option',
   async (t) => {
     process.chdir('tests/base');
     const path = npm(['pack', '../..']);
@@ -1518,16 +1601,16 @@ test(
     });
     t.true(
       pathExistsSync(`.requirements-cache${sep}downloadCacheslspyc${sep}http`),
-      'cache directory exists'
+      'cache directory exists',
     );
     t.end();
   },
   // { skip: !canUseDocker() || brokenOn('win32') }
-  { skip: true }
+  { skip: true },
 );
 
 test(
-  'py3.9 uses static and download cache',
+  'py3.10 uses static and download cache',
   async (t) => {
     process.chdir('tests/base');
     const path = npm(['pack', '../..']);
@@ -1538,21 +1621,21 @@ test(
     const arch = 'x86_64';
     t.true(
       pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`),
-      'http exists in download-cache'
+      'http exists in download-cache',
     );
     t.true(
       pathExistsSync(
-        `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask`
+        `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask`,
       ),
-      'flask exists in static-cache'
+      'flask exists in static-cache',
     );
     t.end();
   },
-  { skip: true }
+  { skip: true },
 );
 
 test(
-  'py3.9 uses static and download cache with dockerizePip option',
+  'py3.10 uses static and download cache with dockerizePip option',
   async (t) => {
     process.chdir('tests/base');
     const path = npm(['pack', '../..']);
@@ -1563,20 +1646,20 @@ test(
     const arch = 'x86_64';
     t.true(
       pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`),
-      'http exists in download-cache'
+      'http exists in download-cache',
     );
     t.true(
       pathExistsSync(
-        `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask`
+        `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask`,
       ),
-      'flask exists in static-cache'
+      'flask exists in static-cache',
     );
     t.end();
   },
-  { skip: !canUseDocker() || brokenOn('win32') }
+  { skip: !canUseDocker() || brokenOn('win32') },
 );
 
-test('py3.9 uses static cache', async (t) => {
+test('py3.10 uses static cache', async (t) => {
   process.chdir('tests/base');
   const path = npm(['pack', '../..']);
   npm(['i', path]);
@@ -1586,33 +1669,33 @@ test('py3.9 uses static cache', async (t) => {
   const arch = 'x86_64';
   t.true(
     pathExistsSync(
-      `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask`
+      `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask`,
     ),
-    'flask exists in static-cache'
+    'flask exists in static-cache',
   );
   t.true(
     pathExistsSync(
-      `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}.completed_requirements`
+      `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}.completed_requirements`,
     ),
-    '.completed_requirements exists in static-cache'
+    '.completed_requirements exists in static-cache',
   );
 
-  // py3.9 checking that static cache actually pulls from cache (by poisoning it)
+  // py3.10 checking that static cache actually pulls from cache (by poisoning it)
   writeFileSync(
     `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}injected_file_is_bad_form`,
-    'injected new file into static cache folder'
+    'injected new file into static cache folder',
   );
   sls(['package'], { env: {} });
   const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip');
   t.true(
     zipfiles.includes('injected_file_is_bad_form'),
-    "static cache is really used when running 'sls package' again"
+    "static cache is really used when running 'sls package' again",
   );
 
   t.end();
 });
 
-test('py3.9 uses static cache with cacheLocation option', async (t) => {
+test('py3.10 uses static cache with cacheLocation option', async (t) => {
   process.chdir('tests/base');
   const path = npm(['pack', '../..']);
   npm(['i', path]);
@@ -1622,21 +1705,21 @@ test('py3.9 uses static cache with cacheLocation option', async (t) => {
   const arch = 'x86_64';
   t.true(
     pathExistsSync(
-      `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask`
+      `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask`,
     ),
-    'flask exists in static-cache'
+    'flask exists in static-cache',
   );
   t.true(
     pathExistsSync(
-      `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}.completed_requirements`
+      `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}.completed_requirements`,
     ),
-    '.completed_requirements exists in static-cache'
+    '.completed_requirements exists in static-cache',
   );
   t.end();
 });
 
 test(
-  'py3.9 uses static cache with dockerizePip & slim option',
+  'py3.10 uses static cache with dockerizePip & slim option',
   async (t) => {
     process.chdir('tests/base');
     const path = npm(['pack', '../..']);
@@ -1647,41 +1730,41 @@ test(
     const arch = 'x86_64';
     t.true(
       pathExistsSync(
-        `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask`
+        `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask`,
       ),
-      'flask exists in static-cache'
+      'flask exists in static-cache',
     );
     t.true(
       pathExistsSync(
-        `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}.completed_requirements`
+        `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}.completed_requirements`,
       ),
-      '.completed_requirements exists in static-cache'
+      '.completed_requirements exists in static-cache',
     );
 
-    // py3.9 checking that static cache actually pulls from cache (by poisoning it)
+    // py3.10 checking that static cache actually pulls from cache (by poisoning it)
     writeFileSync(
       `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}injected_file_is_bad_form`,
-      'injected new file into static cache folder'
+      'injected new file into static cache folder',
     );
     sls(['package'], { env: { dockerizePip: 'true', slim: 'true' } });
     const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip');
     t.true(
       zipfiles.includes('injected_file_is_bad_form'),
-      "static cache is really used when running 'sls package' again"
+      "static cache is really used when running 'sls package' again",
     );
     t.deepEqual(
       zipfiles.filter((filename) => filename.endsWith('.pyc')),
       [],
-      'no pyc files are packaged'
+      'no pyc files are packaged',
     );
 
     t.end();
   },
-  { skip: !canUseDocker() || brokenOn('win32') }
+  { skip: !canUseDocker() || brokenOn('win32') },
 );
 
 test(
-  'py3.9 uses download cache with dockerizePip & slim option',
+  'py3.10 uses download cache with dockerizePip & slim option',
   async (t) => {
     process.chdir('tests/base');
     const path = npm(['pack', '../..']);
@@ -1690,7 +1773,7 @@ test(
     const cachepath = getUserCachePath();
     t.true(
       pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`),
-      'http exists in download-cache'
+      'http exists in download-cache',
     );
 
     const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip');
@@ -1698,15 +1781,15 @@ test(
     t.deepEqual(
       zipfiles.filter((filename) => filename.endsWith('.pyc')),
       [],
-      'no pyc files are packaged'
+      'no pyc files are packaged',
     );
 
     t.end();
   },
-  { skip: !canUseDocker() || brokenOn('win32') }
+  { skip: !canUseDocker() || brokenOn('win32') },
 );
 
-test('py3.9 can ignore functions defined with `image`', async (t) => {
+test('py3.10 can ignore functions defined with `image`', async (t) => {
   process.chdir('tests/base');
   const path = npm(['pack', '../..']);
   npm(['i', path]);
@@ -1714,25 +1797,25 @@ test('py3.9 can ignore functions defined with `image`', async (t) => {
   t.true(pathExistsSync('.serverless/hello.zip'), 'function hello is packaged');
   t.true(
     pathExistsSync('.serverless/hello2.zip'),
-    'function hello2 is packaged'
+    'function hello2 is packaged',
   );
   t.true(
     pathExistsSync('.serverless/hello3.zip'),
-    'function hello3 is packaged'
+    'function hello3 is packaged',
   );
   t.true(
     pathExistsSync('.serverless/hello4.zip'),
-    'function hello4 is packaged'
+    'function hello4 is packaged',
   );
   t.false(
     pathExistsSync('.serverless/hello5.zip'),
-    'function hello5 is not packaged'
+    'function hello5 is not packaged',
   );
 
   t.end();
 });
 
-test('poetry py3.9 fails packaging if poetry.lock is missing and flag requirePoetryLockFile is set to true', async (t) => {
+test('poetry py3.10 fails packaging if poetry.lock is missing and flag requirePoetryLockFile is set to true', async (t) => {
   copySync('tests/poetry', 'tests/base with a space');
   process.chdir('tests/base with a space');
   removeSync('poetry.lock');
@@ -1745,9 +1828,9 @@ test('poetry py3.9 fails packaging if poetry.lock is missing and flag requirePoe
   });
   t.true(
     stdout.includes(
-      'poetry.lock file not found - set requirePoetryLockFile to false to disable this error'
+      'poetry.lock file not found - set requirePoetryLockFile to false to disable this error',
     ),
-    'flag works and error is properly reported'
+    'flag works and error is properly reported',
   );
   t.end();
 });
@@ -1759,12 +1842,12 @@ test('works with provider.runtime not being python', async (t) => {
   sls(['package'], { env: { runtime: 'nodejs12.x' } });
   t.true(
     pathExistsSync('.serverless/sls-py-req-test.zip'),
-    'sls-py-req-test is packaged'
+    'sls-py-req-test is packaged',
   );
   t.end();
 });
 
-test('poetry py3.9 packages additional optional packages', async (t) => {
+test('poetry py3.10 packages additional optional packages', async (t) => {
   process.chdir('tests/poetry_packages');
   const path = npm(['pack', '../..']);
   npm(['i', path]);
@@ -1780,7 +1863,7 @@ test('poetry py3.9 packages additional optional packages', async (t) => {
   t.end();
 });
 
-test('poetry py3.9 skips additional optional packages specified in withoutGroups', async (t) => {
+test('poetry py3.10 skips additional optional packages specified in withoutGroups', async (t) => {
   process.chdir('tests/poetry_packages');
   const path = npm(['pack', '../..']);
   npm(['i', path]);
@@ -1797,7 +1880,7 @@ test('poetry py3.9 skips additional optional packages specified in withoutGroups
   t.end();
 });
 
-test('poetry py3.9 only installs optional packages specified in onlyGroups', async (t) => {
+test('poetry py3.10 only installs optional packages specified in onlyGroups', async (t) => {
   process.chdir('tests/poetry_packages');
   const path = npm(['pack', '../..']);
   npm(['i', path]);
@@ -1813,19 +1896,65 @@ test('poetry py3.9 only installs optional packages specified in onlyGroups', asy
   t.end();
 });
 
-test('py3.7 injects dependencies into `package` folder when using scaleway provider', async (t) => {
+test('py3.10 injects dependencies into `package` folder when using scaleway provider', async (t) => {
   process.chdir('tests/scaleway_provider');
   const path = npm(['pack', '../..']);
   npm(['i', path]);
+  sls(['plugin', 'install', '-n', 'serverless-scaleway-functions'], {
+    env: {},
+  });
   sls(['package'], { env: {} });
   const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip');
   t.true(
     zipfiles.includes(`package${sep}flask${sep}__init__.py`),
-    'flask is packaged'
+    'flask is packaged',
   );
   t.true(
     zipfiles.includes(`package${sep}boto3${sep}__init__.py`),
-    'boto3 is packaged'
+    'boto3 is packaged',
+  );
+  t.end();
+});
+
+test('pyproject.toml py3.10 packages', async (t) => {
+  process.chdir('tests/pyproject_packages');
+  const path = npm(['pack', '../..']);
+  npm(['i', path]);
+  sls(['package'], { env: { pythonBin: getPythonBin(3) } });
+  const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip');
+  t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged');
+  t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged');
+  t.false(zipfiles.includes(`pytest${sep}__init__.py`), 'pytest is NOT packaged');
+  t.end();
+});
+
+test('pyproject.toml py3.10 packages with optional', async (t) => {
+  process.chdir('tests/pyproject_packages_optional');
+  const path = npm(['pack', '../..']);
+  npm(['i', path]);
+  sls(['package'], { env: { pythonBin: getPythonBin(3) } });
+  const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip');
+  t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged');
+  t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged');
+  t.true(zipfiles.includes(`pytest${sep}__init__.py`), 'pytest is packaged');
+  t.true(
+    zipfiles.includes(`pytest_cov${sep}__init__.py`),
+    'pytest-cov is packaged',
+  );
+  t.end();
+});
+
+test('pyproject.toml py3.10 installs code as package', async (t) => {
+  process.chdir('tests/install_folder_as_package');
+  const path = npm(['pack', '../..']);
+  npm(['i', path]);
+  sls(['package'], { env: { pythonBin: getPythonBin(3) } });
+  const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip');
+  t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged');
+  t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged');
+  t.true(
+    zipfiles.includes(`example${sep}__init__.py`),
+    'developer package is packaged',
   );
   t.end();
 });
diff --git a/tests/base/fn2/requirements.txt b/tests/base/fn2/requirements.txt
index eea18113..a46627b5 100644
--- a/tests/base/fn2/requirements.txt
+++ b/tests/base/fn2/requirements.txt
@@ -1 +1 @@
-dataclasses
\ No newline at end of file
+dataclasses
diff --git a/tests/base/requirements-w-hashes.txt b/tests/base/requirements-w-hashes.txt
index 428e8528..d3d9928d 100644
--- a/tests/base/requirements-w-hashes.txt
+++ b/tests/base/requirements-w-hashes.txt
@@ -1,90 +1,126 @@
 #
-# This file is autogenerated by pip-compile
-# To update, run:
+# This file is autogenerated by pip-compile with Python 3.10
+# by the following command:
 #
-#    pip-compile --generate-hashes --output-file requirements-w-hashes.txt requirements.txt
+#    pip-compile --generate-hashes --output-file=requirements-w-hashes.txt requirements.txt
 #
-boto3==1.9.50 \
-    --hash=sha256:177e9dd53db5028bb43050da20cc7956287889fc172e5e6275a634e42a10beeb \
-    --hash=sha256:8c63e616b91907037ab19236afbcf0057efb31411faf38b46f4590e634dc17ea
-botocore==1.12.50 \
-    --hash=sha256:07fae5a2b8cfb5a92c1dbee3f2feb4da7c471bcead7e18ce735babe5f39e270f \
-    --hash=sha256:eeaa190f50ee05a56225ee78c64cb8bf0c3bf090ec605ca6c2f325aa3826a347 \
-    # via boto3, s3transfer
-bottle==0.12.19 \
-    --hash=sha256:f6b8a34fe9aa406f9813c02990db72ca69ce6a158b5b156d2c41f345016a723d \
-    --hash=sha256:a9d73ffcbc6a1345ca2d7949638db46349f5b2b77dac65d6494d45c23628da2c
-click==7.0 \
-    --hash=sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13 \
-    --hash=sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7 \
+boto3==1.34.39 \
+    --hash=sha256:35bcbecf1b5d3620c93f0062d2994177f8bda25a9d2cba144d6462793c16065b \
+    --hash=sha256:476896e70d36c9134d4125834280c597c17b54bff4902baf2e5fcde74f8acec8
+    # via -r requirements.txt
+botocore==1.34.39 \
+    --hash=sha256:9f00bd5e4698bcdd37ce6e224a896baf58d209678ed92834944b767de9061cc5 \
+    --hash=sha256:e175360445424b83b0e28ae20d301b99cf44ff2c9d5ab1d8670899bec05a9753
+    # via
+    #   boto3
+    #   s3transfer
+bottle==0.12.25 \
+    --hash=sha256:d6f15f9d422670b7c073d63bd8d287b135388da187a0f3e3c19293626ce034ea \
+    --hash=sha256:e1a9c94970ae6d710b3fb4526294dfeb86f2cb4a81eff3a4b98dc40fb0e5e021
+    # via -r requirements.txt
+click==8.1.7 \
+    --hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 \
+    --hash=sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de
     # via flask
-docutils==0.14 \
-    --hash=sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6 \
-    --hash=sha256:51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274 \
-    --hash=sha256:7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6 \
-    # via botocore
-flask==1.0.2 \
-    --hash=sha256:2271c0070dbcb5275fad4a82e29f23ab92682dc45f9dfbc22c02ba9b9322ce48 \
-    --hash=sha256:a080b744b7e345ccfcbc77954861cb05b3c63786e93f2b3875e0913d44b43f05
-itsdangerous==1.1.0 \
-    --hash=sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19 \
-    --hash=sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749 \
+flask==2.2.5 \
+    --hash=sha256:58107ed83443e86067e41eff4631b058178191a355886f8e479e347fa1285fdf \
+    --hash=sha256:edee9b0a7ff26621bd5a8c10ff484ae28737a2410d99b0bb9a6850c7fb977aa0
+    # via -r requirements.txt
+itsdangerous==2.1.2 \
+    --hash=sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44 \
+    --hash=sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a
     # via flask
-jinja2==2.11.3 \
-    --hash=sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419 \
-    --hash=sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6 \
+jinja2==3.1.3 \
+    --hash=sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa \
+    --hash=sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90
     # via flask
-jmespath==0.9.3 \
-    --hash=sha256:6a81d4c9aa62caf061cb517b4d9ad1dd300374cd4706997aff9cd6aedd61fc64 \
-    --hash=sha256:f11b4461f425740a1d908e9a3f7365c3d2e569f6ca68a2ff8bc5bcd9676edd63 \
-    # via boto3, botocore
-markupsafe==1.1.0 \
-    --hash=sha256:048ef924c1623740e70204aa7143ec592504045ae4429b59c30054cb31e3c432 \
-    --hash=sha256:130f844e7f5bdd8e9f3f42e7102ef1d49b2e6fdf0d7526df3f87281a532d8c8b \
-    --hash=sha256:19f637c2ac5ae9da8bfd98cef74d64b7e1bb8a63038a3505cd182c3fac5eb4d9 \
-    --hash=sha256:1b8a7a87ad1b92bd887568ce54b23565f3fd7018c4180136e1cf412b405a47af \
-    --hash=sha256:1c25694ca680b6919de53a4bb3bdd0602beafc63ff001fea2f2fc16ec3a11834 \
-    --hash=sha256:1f19ef5d3908110e1e891deefb5586aae1b49a7440db952454b4e281b41620cd \
-    --hash=sha256:1fa6058938190ebe8290e5cae6c351e14e7bb44505c4a7624555ce57fbbeba0d \
-    --hash=sha256:31cbb1359e8c25f9f48e156e59e2eaad51cd5242c05ed18a8de6dbe85184e4b7 \
-    --hash=sha256:3e835d8841ae7863f64e40e19477f7eb398674da6a47f09871673742531e6f4b \
-    --hash=sha256:4e97332c9ce444b0c2c38dd22ddc61c743eb208d916e4265a2a3b575bdccb1d3 \
-    --hash=sha256:525396ee324ee2da82919f2ee9c9e73b012f23e7640131dd1b53a90206a0f09c \
-    --hash=sha256:52b07fbc32032c21ad4ab060fec137b76eb804c4b9a1c7c7dc562549306afad2 \
-    --hash=sha256:52ccb45e77a1085ec5461cde794e1aa037df79f473cbc69b974e73940655c8d7 \
-    --hash=sha256:5c3fbebd7de20ce93103cb3183b47671f2885307df4a17a0ad56a1dd51273d36 \
-    --hash=sha256:5e5851969aea17660e55f6a3be00037a25b96a9b44d2083651812c99d53b14d1 \
-    --hash=sha256:5edfa27b2d3eefa2210fb2f5d539fbed81722b49f083b2c6566455eb7422fd7e \
-    --hash=sha256:7d263e5770efddf465a9e31b78362d84d015cc894ca2c131901a4445eaa61ee1 \
-    --hash=sha256:83381342bfc22b3c8c06f2dd93a505413888694302de25add756254beee8449c \
-    --hash=sha256:857eebb2c1dc60e4219ec8e98dfa19553dae33608237e107db9c6078b1167856 \
-    --hash=sha256:98e439297f78fca3a6169fd330fbe88d78b3bb72f967ad9961bcac0d7fdd1550 \
-    --hash=sha256:bf54103892a83c64db58125b3f2a43df6d2cb2d28889f14c78519394feb41492 \
-    --hash=sha256:d9ac82be533394d341b41d78aca7ed0e0f4ba5a2231602e2f05aa87f25c51672 \
-    --hash=sha256:e982fe07ede9fada6ff6705af70514a52beb1b2c3d25d4e873e82114cf3c5401 \
-    --hash=sha256:edce2ea7f3dfc981c4ddc97add8a61381d9642dc3273737e756517cc03e84dd6 \
-    --hash=sha256:efdc45ef1afc238db84cb4963aa689c0408912a0239b0721cb172b4016eb31d6 \
-    --hash=sha256:f137c02498f8b935892d5c0172560d7ab54bc45039de8805075e19079c639a9c \
-    --hash=sha256:f82e347a72f955b7017a39708a3667f106e6ad4d10b25f237396a7115d8ed5fd \
-    --hash=sha256:fb7c206e01ad85ce57feeaaa0bf784b97fa3cad0d4a5737bc5295785f5c613a1 \
-    # via jinja2
-python-dateutil==2.7.5 \
-    --hash=sha256:063df5763652e21de43de7d9e00ccf239f953a832941e37be541614732cdfc93 \
-    --hash=sha256:88f9287c0174266bb0d8cedd395cfba9c58e87e5ad86b2ce58859bc11be3cf02 \
+jmespath==1.0.1 \
+    --hash=sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980 \
+    --hash=sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe
+    # via
+    #   boto3
+    #   botocore
+markupsafe==2.1.5 \
+    --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \
+    --hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \
+    --hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \
+    --hash=sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3 \
+    --hash=sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532 \
+    --hash=sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f \
+    --hash=sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617 \
+    --hash=sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df \
+    --hash=sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4 \
+    --hash=sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906 \
+    --hash=sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f \
+    --hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \
+    --hash=sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8 \
+    --hash=sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371 \
+    --hash=sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2 \
+    --hash=sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465 \
+    --hash=sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52 \
+    --hash=sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6 \
+    --hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \
+    --hash=sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad \
+    --hash=sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2 \
+    --hash=sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0 \
+    --hash=sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029 \
+    --hash=sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f \
+    --hash=sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a \
+    --hash=sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced \
+    --hash=sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5 \
+    --hash=sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c \
+    --hash=sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf \
+    --hash=sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9 \
+    --hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb \
+    --hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \
+    --hash=sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3 \
+    --hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \
+    --hash=sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46 \
+    --hash=sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc \
+    --hash=sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a \
+    --hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \
+    --hash=sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900 \
+    --hash=sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5 \
+    --hash=sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea \
+    --hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \
+    --hash=sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5 \
+    --hash=sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e \
+    --hash=sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a \
+    --hash=sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f \
+    --hash=sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50 \
+    --hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \
+    --hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \
+    --hash=sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4 \
+    --hash=sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff \
+    --hash=sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2 \
+    --hash=sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46 \
+    --hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \
+    --hash=sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf \
+    --hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 \
+    --hash=sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5 \
+    --hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \
+    --hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \
+    --hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68
+    # via
+    #   jinja2
+    #   werkzeug
+python-dateutil==2.8.2 \
+    --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \
+    --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9
     # via botocore
-s3transfer==0.1.13 \
-    --hash=sha256:90dc18e028989c609146e241ea153250be451e05ecc0c2832565231dacdf59c1 \
-    --hash=sha256:c7a9ec356982d5e9ab2d4b46391a7d6a950e2b04c472419f5fdec70cc0ada72f \
+s3transfer==0.10.0 \
+    --hash=sha256:3cdb40f5cfa6966e812209d0994f2a4709b561c88e90cf00c2696d2df4e56b2e \
+    --hash=sha256:d0c8bbf672d5eebbe4e57945e23b972d963f07d82f661cabf678a5c88831595b
     # via boto3
-six==1.11.0 \
-    --hash=sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9 \
-    --hash=sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb \
+six==1.16.0 \
+    --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
+    --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
     # via python-dateutil
-urllib3==1.24.2 \
-    --hash=sha256:4c291ca23bbb55c76518905869ef34bdd5f0e46af7afe6861e8375643ffee1a0 \
-    --hash=sha256:9a247273df709c4fedb38c711e44292304f73f39ab01beda9f6b9fc375669ac3 \
+urllib3==2.0.7 \
+    --hash=sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84 \
+    --hash=sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e
     # via botocore
-werkzeug==0.15.3 \
-    --hash=sha256:97660b282aa7e29f94f3fe378e5c7162d7ab9d601a8dbb1cbb2ffc8f0e54607d \
-    --hash=sha256:cfd1281b1748288e59762c0e174d64d8bcb2b70e7c57bc4a1203c8825af24ac3 \
+werkzeug==3.0.1 \
+    --hash=sha256:507e811ecea72b18a404947aded4b3390e1db8f826b494d76550ef45bb3b1dcc \
+    --hash=sha256:90a285dc0e42ad56b34e696398b8122ee4c681833fb35b8334a095d82c56da10
     # via flask
diff --git a/tests/base/requirements-w-nested.txt b/tests/base/requirements-w-nested.txt
index b09aa52a..61b8cc2d 100644
--- a/tests/base/requirements-w-nested.txt
+++ b/tests/base/requirements-w-nested.txt
@@ -1,3 +1,3 @@
-flask==2.0.3
+flask==2.2.5
 bottle
 -r requirements-common.txt
diff --git a/tests/base/requirements.txt b/tests/base/requirements.txt
index 23bfb7a6..e693ce9d 100644
--- a/tests/base/requirements.txt
+++ b/tests/base/requirements.txt
@@ -1,3 +1,3 @@
-flask==0.12.5
+flask==2.2.5
 bottle
 boto3
diff --git a/tests/base/serverless.yml b/tests/base/serverless.yml
index a82187ff..43da20ab 100644
--- a/tests/base/serverless.yml
+++ b/tests/base/serverless.yml
@@ -2,7 +2,7 @@ service: sls-py-req-test
 
 provider:
   name: aws
-  runtime: ${env:runtime, 'python3.9'}
+  runtime: ${env:runtime, 'python3.10'}
 
 plugins:
   - serverless-python-requirements
@@ -47,7 +47,7 @@ functions:
     handler: handler.hello
   hello3:
     handler: handler.hello
-    runtime: nodejs8.10
+    runtime: nodejs14.x
   hello4:
     handler: fn2_handler.hello
     module: fn2
diff --git a/tests/individually/module2/requirements.txt b/tests/individually/module2/requirements.txt
index c09d0264..0f4e7dbf 100644
--- a/tests/individually/module2/requirements.txt
+++ b/tests/individually/module2/requirements.txt
@@ -1 +1 @@
-flask==2.0.3
+flask==2.2.5
diff --git a/tests/individually/serverless.yml b/tests/individually/serverless.yml
index 6409532b..cfaf0013 100644
--- a/tests/individually/serverless.yml
+++ b/tests/individually/serverless.yml
@@ -2,7 +2,7 @@ service: sls-py-req-test-indiv
 
 provider:
   name: aws
-  runtime: python3.9
+  runtime: python3.10
 
 package:
   individually: true
diff --git a/tests/individually_mixed_runtime/module1/handler1.ts b/tests/individually_mixed_runtime/module1/handler1.ts
index b8062f8b..a061a135 100644
--- a/tests/individually_mixed_runtime/module1/handler1.ts
+++ b/tests/individually_mixed_runtime/module1/handler1.ts
@@ -1,3 +1,3 @@
 function hello() {
-    return "hello"
+  return 'hello';
 }
diff --git a/tests/individually_mixed_runtime/module2/requirements.txt b/tests/individually_mixed_runtime/module2/requirements.txt
index c09d0264..0f4e7dbf 100644
--- a/tests/individually_mixed_runtime/module2/requirements.txt
+++ b/tests/individually_mixed_runtime/module2/requirements.txt
@@ -1 +1 @@
-flask==2.0.3
+flask==2.2.5
diff --git a/tests/individually_mixed_runtime/serverless.yml b/tests/individually_mixed_runtime/serverless.yml
index 7c602239..c2f4d35f 100644
--- a/tests/individually_mixed_runtime/serverless.yml
+++ b/tests/individually_mixed_runtime/serverless.yml
@@ -28,7 +28,7 @@ functions:
   hello2:
     handler: handler2.hello
     module: module2
-    runtime: python3.9
+    runtime: python3.10
     architecture: x86_64
     package:
       patterns:
diff --git a/tests/install_folder_as_package/package.json b/tests/install_folder_as_package/package.json
new file mode 100644
index 00000000..b07744c9
--- /dev/null
+++ b/tests/install_folder_as_package/package.json
@@ -0,0 +1,14 @@
+{
+  "name": "example",
+  "version": "1.0.0",
+  "description": "",
+  "main": "index.js",
+  "scripts": {
+    "test": "echo \"Error: no test specified\" && exit 1"
+  },
+  "author": "",
+  "license": "ISC",
+  "dependencies": {
+    "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz"
+  }
+}
diff --git a/tests/install_folder_as_package/pyproject.toml b/tests/install_folder_as_package/pyproject.toml
new file mode 100644
index 00000000..2c842c2e
--- /dev/null
+++ b/tests/install_folder_as_package/pyproject.toml
@@ -0,0 +1,17 @@
+[project]
+name = "example"
+version = "1.0.0"
+authors = [
+  { name="Jackson Borneman", email="11304426+jax-b@users.noreply.github.com" },
+]
+description = "example"
+requires-python = ">=3.10"
+classifiers = [
+    "Programming Language :: Python :: 3",
+]
+
+
+dependencies = [
+  "flask",
+  "boto3"
+]
diff --git a/tests/install_folder_as_package/serverless.yml b/tests/install_folder_as_package/serverless.yml
new file mode 100644
index 00000000..dcf1c84f
--- /dev/null
+++ b/tests/install_folder_as_package/serverless.yml
@@ -0,0 +1,30 @@
+service: sls-py-req-test
+
+provider:
+  name: aws
+  runtime: python3.10
+
+plugins:
+  - serverless-python-requirements
+custom:
+  pythonRequirements:
+    zip: ${env:zip, self:custom.defaults.zip}
+    slim: ${env:slim, self:custom.defaults.slim}
+    slimPatterns: ${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns}
+    slimPatternsAppendDefaults: ${env:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults}
+    dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip}
+    requirePoetryLockFile: ${env:requirePoetryLockFile, false}
+    installFolderAsPackage: true
+  defaults:
+    zip: false
+    slimPatterns: false
+    slimPatternsAppendDefaults: true
+    slim: false
+    dockerizePip: false
+
+package:
+  individually: false
+
+functions:
+  hello:
+    handler: example.handler.hello
diff --git a/tests/install_folder_as_package/src/example/__init__.py b/tests/install_folder_as_package/src/example/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/install_folder_as_package/src/example/handler.py b/tests/install_folder_as_package/src/example/handler.py
new file mode 100644
index 00000000..42f37c0a
--- /dev/null
+++ b/tests/install_folder_as_package/src/example/handler.py
@@ -0,0 +1,7 @@
+import requests
+from example.helper import helper
+
+def hello(event, context):
+    return_data = requests.get('https://httpbin.org/get').json()
+    return_data["test"] = helper()
+    return return_data
diff --git a/tests/install_folder_as_package/src/example/helper.py b/tests/install_folder_as_package/src/example/helper.py
new file mode 100644
index 00000000..1ed7015f
--- /dev/null
+++ b/tests/install_folder_as_package/src/example/helper.py
@@ -0,0 +1,2 @@
+def helper():
+    return "helper"
diff --git a/tests/non_build_pyproject/requirements.txt b/tests/non_build_pyproject/requirements.txt
index 09764fc3..ea958376 100644
--- a/tests/non_build_pyproject/requirements.txt
+++ b/tests/non_build_pyproject/requirements.txt
@@ -1,2 +1,2 @@
-flask==2.0.3
+flask==2.2.5
 boto3
diff --git a/tests/non_build_pyproject/serverless.yml b/tests/non_build_pyproject/serverless.yml
index d1bbaee6..df5a9ad5 100644
--- a/tests/non_build_pyproject/serverless.yml
+++ b/tests/non_build_pyproject/serverless.yml
@@ -2,7 +2,7 @@ service: sls-py-req-test
 
 provider:
   name: aws
-  runtime: python3.9
+  runtime: python3.10
 
 plugins:
   - serverless-python-requirements
diff --git a/tests/non_poetry_pyproject/serverless.yml b/tests/non_poetry_pyproject/serverless.yml
index 7338b10b..d71bfbe4 100644
--- a/tests/non_poetry_pyproject/serverless.yml
+++ b/tests/non_poetry_pyproject/serverless.yml
@@ -2,7 +2,7 @@ service: sls-py-req-test
 
 provider:
   name: aws
-  runtime: python3.9
+  runtime: python3.10
 
 plugins:
   - serverless-python-requirements
diff --git a/tests/pipenv/Pipfile b/tests/pipenv/Pipfile
index 30e51dda..fe943d37 100644
--- a/tests/pipenv/Pipfile
+++ b/tests/pipenv/Pipfile
@@ -4,7 +4,7 @@ verify_ssl = true
 name = "pypi"
 
 [packages]
-Flask = "==2.0.3"
+Flask = "==2.2.5"
 bottle = "*"
 boto3 = "*"
 
diff --git a/tests/pipenv/serverless.yml b/tests/pipenv/serverless.yml
index 2b471526..f92ea998 100644
--- a/tests/pipenv/serverless.yml
+++ b/tests/pipenv/serverless.yml
@@ -2,7 +2,7 @@ service: sls-py-req-test
 
 provider:
   name: aws
-  runtime: python3.9
+  runtime: python3.10
 
 plugins:
   - serverless-python-requirements
diff --git a/tests/poetry/pyproject.toml b/tests/poetry/pyproject.toml
index 896b48e7..247465b5 100644
--- a/tests/poetry/pyproject.toml
+++ b/tests/poetry/pyproject.toml
@@ -5,10 +5,10 @@ description = ""
 authors = ["Your Name <you@example.com>"]
 
 [tool.poetry.dependencies]
-python = "^3.7"
-Flask = "2.0"
-bottle = {git = "https://git@github.com/bottlepy/bottle.git", tag = "0.12.16"}
-boto3 = "1.29.6"
+python = "^3.10"
+Flask = "2.2.5"
+bottle = {git = "https://git@github.com/bottlepy/bottle.git", tag = "0.12.25"}
+boto3 = "1.34.39"
 
 [tool.poetry.dev-dependencies]
 
diff --git a/tests/poetry/serverless.yml b/tests/poetry/serverless.yml
index d10c4997..b77ebdb7 100644
--- a/tests/poetry/serverless.yml
+++ b/tests/poetry/serverless.yml
@@ -2,7 +2,7 @@ service: sls-py-req-test
 
 provider:
   name: aws
-  runtime: python3.9
+  runtime: python3.10
 
 plugins:
   - serverless-python-requirements
diff --git a/tests/poetry_individually/module1/pyproject.toml b/tests/poetry_individually/module1/pyproject.toml
index 896b48e7..247465b5 100644
--- a/tests/poetry_individually/module1/pyproject.toml
+++ b/tests/poetry_individually/module1/pyproject.toml
@@ -5,10 +5,10 @@ description = ""
 authors = ["Your Name <you@example.com>"]
 
 [tool.poetry.dependencies]
-python = "^3.7"
-Flask = "2.0"
-bottle = {git = "https://git@github.com/bottlepy/bottle.git", tag = "0.12.16"}
-boto3 = "1.29.6"
+python = "^3.10"
+Flask = "2.2.5"
+bottle = {git = "https://git@github.com/bottlepy/bottle.git", tag = "0.12.25"}
+boto3 = "1.34.39"
 
 [tool.poetry.dev-dependencies]
 
diff --git a/tests/poetry_individually/serverless.yml b/tests/poetry_individually/serverless.yml
index 86dbb547..43214462 100644
--- a/tests/poetry_individually/serverless.yml
+++ b/tests/poetry_individually/serverless.yml
@@ -2,7 +2,7 @@ service: sls-py-req-test
 
 provider:
   name: aws
-  runtime: python3.9
+  runtime: python3.10
 
 plugins:
   - serverless-python-requirements
diff --git a/tests/poetry_packages/pyproject.toml b/tests/poetry_packages/pyproject.toml
index 0f9fc705..9c617dc4 100644
--- a/tests/poetry_packages/pyproject.toml
+++ b/tests/poetry_packages/pyproject.toml
@@ -5,14 +5,14 @@ description = ""
 authors = ["Your Name <you@example.com>"]
 
 [tool.poetry.dependencies]
-python = "^3.7"
+python = "^3.10"
 Flask = "2.0"
 
 [tool.poetry.group.custom1.dependencies]
-bottle = {git = "https://git@github.com/bottlepy/bottle.git", tag = "0.12.16"}
+bottle = {git = "https://git@github.com/bottlepy/bottle.git", tag = "0.12.25"}
 
 [tool.poetry.group.custom2.dependencies]
-boto3 = "1.29.6"
+boto3 = "1.34.39"
 
 [build-system]
 requires = ["poetry-core"]
diff --git a/tests/poetry_packages/serverless.yml b/tests/poetry_packages/serverless.yml
index c6972ede..11b0e757 100644
--- a/tests/poetry_packages/serverless.yml
+++ b/tests/poetry_packages/serverless.yml
@@ -2,7 +2,7 @@ service: sls-py-req-test
 
 provider:
   name: aws
-  runtime: python3.9
+  runtime: python3.10
 
 plugins:
   - serverless-python-requirements
diff --git a/tests/pyproject_packages/handler.py b/tests/pyproject_packages/handler.py
new file mode 100644
index 00000000..5e2e67ff
--- /dev/null
+++ b/tests/pyproject_packages/handler.py
@@ -0,0 +1,5 @@
+import requests
+
+
+def hello(event, context):
+    return requests.get('https://httpbin.org/get').json()
diff --git a/tests/pyproject_packages/package.json b/tests/pyproject_packages/package.json
new file mode 100644
index 00000000..f771e78e
--- /dev/null
+++ b/tests/pyproject_packages/package.json
@@ -0,0 +1,14 @@
+{
+  "name": "example",
+  "version": "1.0.0",
+  "description": "",
+  "main": "index.js",
+  "scripts": {
+    "test": "echo \"Error: no test specified\" && exit 1"
+  },
+  "author": "",
+  "license": "ISC",
+  "dependencies": {
+    "serverless-python-requirements": "file:.."
+  }
+}
diff --git a/tests/pyproject_packages/pyproject.toml b/tests/pyproject_packages/pyproject.toml
new file mode 100644
index 00000000..94f93601
--- /dev/null
+++ b/tests/pyproject_packages/pyproject.toml
@@ -0,0 +1,18 @@
+[project]
+name = "example"
+version = "1.0.0"
+authors = [
+  { name="Jackson Borneman", email="11304426+jax-b@users.noreply.github.com" },
+]
+description = "example"
+requires-python = ">=3.10"
+
+dependencies = [
+  "flask",
+  "boto3"
+]
+
+[project.optional-dependencies]
+test = [
+  "pytest",
+]
diff --git a/tests/pyproject_packages/serverless.yml b/tests/pyproject_packages/serverless.yml
new file mode 100644
index 00000000..763197dc
--- /dev/null
+++ b/tests/pyproject_packages/serverless.yml
@@ -0,0 +1,29 @@
+service: sls-py-req-test
+
+provider:
+  name: aws
+  runtime: python3.10
+
+plugins:
+  - serverless-python-requirements
+custom:
+  pythonRequirements:
+    zip: ${env:zip, self:custom.defaults.zip}
+    slim: ${env:slim, self:custom.defaults.slim}
+    slimPatterns: ${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns}
+    slimPatternsAppendDefaults: ${env:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults}
+    dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip}
+    requirePoetryLockFile: ${env:requirePoetryLockFile, false}
+  defaults:
+    zip: false
+    slimPatterns: false
+    slimPatternsAppendDefaults: true
+    slim: false
+    dockerizePip: false
+
+package:
+  individually: false
+
+functions:
+  hello:
+    handler: handler.hello
diff --git a/tests/pyproject_packages_optional/handler.py b/tests/pyproject_packages_optional/handler.py
new file mode 100644
index 00000000..5e2e67ff
--- /dev/null
+++ b/tests/pyproject_packages_optional/handler.py
@@ -0,0 +1,5 @@
+import requests
+
+
+def hello(event, context):
+    return requests.get('https://httpbin.org/get').json()
diff --git a/tests/pyproject_packages_optional/package.json b/tests/pyproject_packages_optional/package.json
new file mode 100644
index 00000000..f771e78e
--- /dev/null
+++ b/tests/pyproject_packages_optional/package.json
@@ -0,0 +1,14 @@
+{
+  "name": "example",
+  "version": "1.0.0",
+  "description": "",
+  "main": "index.js",
+  "scripts": {
+    "test": "echo \"Error: no test specified\" && exit 1"
+  },
+  "author": "",
+  "license": "ISC",
+  "dependencies": {
+    "serverless-python-requirements": "file:.."
+  }
+}
diff --git a/tests/pyproject_packages_optional/pyproject.toml b/tests/pyproject_packages_optional/pyproject.toml
new file mode 100644
index 00000000..8b06e636
--- /dev/null
+++ b/tests/pyproject_packages_optional/pyproject.toml
@@ -0,0 +1,20 @@
+[project]
+name = "example"
+version = "1.0.0"
+authors = [
+  { name="Jackson Borneman", email="11304426+jax-b@users.noreply.github.com" },
+]
+description = "example"
+requires-python = ">=3.10"
+
+dependencies = [
+  "flask",
+  "hug",
+  "boto3"
+]
+
+[project.optional-dependencies]
+test = [
+  "pytest",
+  "pytest-cov"
+]
diff --git a/tests/pyproject_packages_optional/serverless.yml b/tests/pyproject_packages_optional/serverless.yml
new file mode 100644
index 00000000..8cc4dc2c
--- /dev/null
+++ b/tests/pyproject_packages_optional/serverless.yml
@@ -0,0 +1,25 @@
+service: sls-py-req-test
+
+provider:
+  name: aws
+  runtime: python3.10
+
+plugins:
+  - serverless-python-requirements
+custom:
+  pythonRequirements:
+    dockerizePip: false
+    pythonBin: ${opt:pythonBin, self:provider.runtime}
+    pyprojectWithGroups:
+      - 'test'
+
+package:
+  individually: false
+  exclude:
+    - '**/*'
+  include:
+    - handler.py
+
+functions:
+  hello:
+    handler: handler.hello
diff --git a/tests/scaleway_provider/requirements.txt b/tests/scaleway_provider/requirements.txt
index 23bfb7a6..e693ce9d 100644
--- a/tests/scaleway_provider/requirements.txt
+++ b/tests/scaleway_provider/requirements.txt
@@ -1,3 +1,3 @@
-flask==0.12.5
+flask==2.2.5
 bottle
 boto3
diff --git a/tests/scaleway_provider/serverless.yml b/tests/scaleway_provider/serverless.yml
index 5d827bdf..7389343a 100644
--- a/tests/scaleway_provider/serverless.yml
+++ b/tests/scaleway_provider/serverless.yml
@@ -4,7 +4,7 @@ configValidationMode: off
 
 provider:
   name: scaleway
-  runtime: python39
+  runtime: python310
 
 plugins:
   - serverless-python-requirements