diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml
new file mode 100644
index 0000000..90fbae3
--- /dev/null
+++ b/.github/workflows/test.yaml
@@ -0,0 +1,31 @@
+name: test
+
+on: [push]
+
+jobs:
+ build:
+
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ python-version: ["3.9"]
+
+ steps:
+ - uses: actions/checkout@v3
+ - name: Set up Python ${{ matrix.python-version }}
+ uses: actions/setup-python@v4
+ with:
+ python-version: ${{ matrix.python-version }}
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install flake8 pytest
+ if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
+ - name: Lint with flake8
+ run: |
+ # stop the build if there are Python syntax errors or undefined names
+ flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
+ # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
+ flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
+ - name: Test with makefile
+ run: make test
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index d9d1ad3..b233486 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,4 @@
+usecases/piper/
applications/
# Byte-compiled / optimized / DLL files
__pycache__/
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..c16d48c
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2022 tatradev.com
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
\ No newline at end of file
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..d914411
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,4 @@
+include piper/base/backend/templates/*.j2
+include piper/base/docker/images/*.j2
+include piper/base/docker_compose/templates/*.j2
+include piper/base/virtualenv/templates/*.j2
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..3714f4e
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,21 @@
+SHELL:=/usr/bin/env bash
+
+.PHONY: lint unit package test jupyter
+
+lint:
+ flake8 piper
+
+unit:
+ pytest -vs tests/import_test.py
+ pytest -vs tests/base_executor_test.py
+ pytest -vs tests/base_test.py
+ pytest -vs tests/envs_test.py::TestCompose
+ pytest -vs tests/envs_test.py::TestVenv
+
+package:
+ pip check
+
+test: package unit
+
+jupyter:
+ docker run -it --rm -p 10000:8888 -v "${PWD}":/home/jovyan/work jupyter/datascience-notebook:b418b67c225b
\ No newline at end of file
diff --git a/Readme.rst b/Readme.rst
new file mode 100644
index 0000000..8fe2847
--- /dev/null
+++ b/Readme.rst
@@ -0,0 +1,158 @@
+|Banner|
+
+`Website `_
+• `Docs `_
+• `Chat (Community & Support) `_
+• `Tutorials `_
+
+|Contribs| |License| |Release| |Python Version| |Docs| |Build status| |Contact| |Blog|
+
+**Piper** is an **open-source** platform for data science and machine
+learning prototyping. Concentrate only on your goals. Key features:
+
+#. Simple **python contexts** experience. Helps to create and deploy pipelines. Does not depend on any proprietary online services.
+
+#. Connect each module into a **pipeline**. Run it via docker or virtual environment. Then build whole **infrastructure** by using venv, Docker or Cloud.
+
+#. Decreases routine and repetitive tasks. Speed up process **from idea to production**.
+
+#. Well-tested and reproducible. Easily extendable by your own **Executor**.
+
+**Piper** aims to help data-scientists and machine-learning developers to create and build full infrastructure for their projects.
+
+.. contents:: **Contents**
+ :backlinks: none
+
+How Piper works
+===============
+
+|Flowchart|
+
+
+
+Quick start
+===========
+Quick start pipertool package compose env
+=========================================
+
+In root directory project run command in terminal
+
+- sudo -u root /bin/bash
+
+- create and activate venv
+
+- pip install -r requirements.txt
+
+- in configuration.py update the path to point to the new directory
+
+- python setup.py install
+
+- piper --env-type compose start
+
+- 0.0.0.0:7585 - FastApi
+
+- 0.0.0.0:9001 - Milvus Console (minioadmin/minioadmin)
+
+- piper --env-type compose stop
+
+- pip uninstall piper
+
+Quick start pipertool from source (compose env)
+===============================================
+
+In root directory project run command in terminal
+
+- sudo -u root /bin/bash
+
+- create and activate venv
+
+- pip install -r requirements.txt
+
+- in configuration.py update the path to point to the new directory
+
+- python main.py
+
+- await click CTRL+C from compose env
+
+
+
+Installation
+============
+
+pip (PyPI)
+----------
+
+.. code-block:: bash
+
+   pip install pipertool
+
+Comparison to related technologies
+==================================
+
+#. **Jupyter** - is the de facto experimental environment for most data scientists. However, it is mainly suited to writing experimental code, not production pipelines.
+
+#. **Data Engineering tools such as** `AirFlow `_ or
+   `Luigi `_ - These are very popular ML pipeline build tools. Airflow can be connected to a kubernetes cluster or collect tasks through a simple PythonOperator. The downside is that their functionality is generally limited to this; that is, they do not provide ML modules out of the box. Moreover, all developments will still have to be wrapped in a scheduler and this is not always a trivial task. However, we like them and we use Airflow and Luigi as possible contexts for executors.
+
+#. **Azure ML / Amazon SageMaker / Google Cloud** - Cloud platforms really allow you to assemble an entire system from ready-made modules and put it into operation relatively quickly. Of the minuses: high cost, binding to a specific cloud, as well as limited customization for specific business needs. For a large business, this is the most logical option - to build an ML infrastructure in the cloud. We also maintain cloud options as possible ways for the deployment step.
+
+#. **DataRobot/Baseten** - They offer an interesting, but small set of ready-made modules. However, in Baseten, all integration is implied in the kubernetes cluster. This is not always convenient and necessary for Proof-of-Concept. Piper also provides an open-source framework in which you can build a truly customized pipeline from many modules. Basically, such companies either do not provide an open-source framework, or provide a very truncated set of modules for experiments, which limits the freedom, functionality, and applicability of these platforms. This is partly similar to the hub of models and datasets in huggingface.
+
+#. **Mlflow / DVC** - There are also many excellent projects on the market for tracking experiments, serving and storing machine learning models. But they are increasingly utilitarian and do not directly help in the task of accelerating the construction of a machine learning MVP project. We plan to add integrations to Piper with the most popular frameworks for the needs of DS and ML specialists.
+
+
+Contributing
+============
+
+|Maintainability| |Donate|
+
+Contributions are welcome! Please see our `Contributing Guide `_ for more
+details. Thanks to all our contributors!
+
+|Contribs|
+
+Mailing List
+============
+
+
+
+Copyright
+=========
+
+This project is distributed under the Apache license version 2.0 (see the LICENSE file in the project root).
+
+By submitting a pull request to this project, you agree to license your contribution under the Apache license version
+2.0 to this project.
+
+
+
+.. |Banner| image:: https://static.tildacdn.com/tild3434-6665-4638-a432-626636353134/illistration.svg
+ :target: http://pipertool.org/
+ :alt: Piper logo
+
+
+.. |Contribs| image:: https://img.shields.io/badge/contributors-3-brightgreen
+ :target: https://github.com/TatraDev/pipertool/graphs/contributors
+ :alt: Contributors
+
+.. |Python Version| image:: https://img.shields.io/badge/python-3.8%20%7C%203.9%20%7C%203.10-blue
+ :target: https://pypi.org/project/pipertool
+ :alt: Python Version
+
+.. |Release| image:: https://img.shields.io/badge/release-v0.0.2-blue
+ :target: https://github.com/TatraDev/pipertool/releases
+
+.. |Build status| image:: https://github.com/TatraDev/pipertool/workflows/test/badge.svg?branch=venv_logic&event=push
+ :target: https://github.com/TatraDev/pipertool/actions?query=workflow%3Atest
+
+.. |Contact| image:: https://img.shields.io/badge/telegram-write%20me-blue.svg
+ :target: https://t.me/pipertool
+
+.. |Blog| image:: https://img.shields.io/badge/site-my%20blog-yellow.svg
+ :target: https://tatradev.com
+
+.. |License| image:: https://img.shields.io/badge/License-Apache_2.0-yellow.svg
+ :target: https://www.apache.org/licenses/LICENSE-2.0
+
+.. |Docs| image:: https://img.shields.io/badge/docs-passing-brightgreen
+ :target: http://pipertool.org
diff --git a/main.py b/main.py
index dde927a..c62fa75 100644
--- a/main.py
+++ b/main.py
@@ -1,38 +1,67 @@
-from piper.services import TestMessageAdder, StringValue, TesseractRecognizer, SpacyNER
-from piper.envs import CurrentEnv, DockerEnv
-from piper.configurations import get_configuration
-import time
import asyncio
import sys
-from piper.utils import tesrct_utils as tu
+import time
from loguru import logger
+
+from piper.configurations import get_configuration
+from piper.envs import ComposeEnv, CurrentEnv, DockerEnv, VirtualEnv
+
+from piper.services import (
+ SpacyNER,
+ StringValue,
+ TesseractRecognizer,
+ FaceDetector,
+ TestMessageAdder,
+)
+
+from piper.utils import tesrct_utils as tu
+
logger.add("file.log", level="INFO", backtrace=True, diagnose=True, rotation='5 MB')
if __name__ == '__main__':
- # cfg = get_configuration()
- # loop = asyncio.get_event_loop()
- # with CurrentEnv() as env:
- # x = StringValue(value="hello, world")
- # adder = TestMessageAdder(appender="!", port=cfg.docker_app_port)
- # result = loop.run_until_complete(adder(x))
- # print(result)
-
- # x = StringValue(value="hello, world")
- # adder = TestMessageAdder(appender="!", port=cfg.docker_app_port)
- # result = loop.run_until_complete(adder(x))
- # print(result)
- # adder.rm_container()
-
- logger.info(f'main here {time.time()}')
cfg = get_configuration()
loop = asyncio.get_event_loop()
- with DockerEnv() as env:
- # object created
- recognizer = TesseractRecognizer(port=cfg.docker_app_port)
-
- result = loop.run_until_complete(recognizer())
- logger.info(f'result of recognition is {result}')
+ with CurrentEnv() as env:
+ x = StringValue(value="hello, world")
+ adder = TestMessageAdder(appender="!", port=cfg.docker_app_port)
+ result = loop.run_until_complete(adder(x))
+ print(result)
+
+ x = StringValue(value="hello, world")
+ adder = TestMessageAdder(appender="!", port=cfg.docker_app_port)
+ result = loop.run_until_complete(adder(x))
+ print(result)
+ adder.rm_container()
+
+ # logger.info(f'main here {time.time()}')
+ # cfg = get_configuration()
+
+ # print(cfg.path)
+
+ # loop = asyncio.get_event_loop()
+ # with DockerEnv() as env:
+ # # object created
+ # # recognizer = TesseractRecognizer(port=cfg.docker_app_port)
+ # recognizer = FaceDetector(port=cfg.docker_app_port)
+
+ # result = loop.run_until_complete(recognizer())
+ # logger.info(f'result of recognition is {result}')
+
+ # with VirtualEnv() as env:
+ # env.copy_struct_project()
+ # env.create_files_for_venv()
+ # env.create_files_for_tests()
+
+ # with ComposeEnv() as env:
+ # try:
+ # env.copy_struct_project()
+ # env.create_files_for_compose(testing=True)
+ # env.start_compose()
+ # except KeyboardInterrupt:
+ # logger.info('Ctrl+C pressed. Except KeyboardInterrupt.')
+ # env.stop_compose()
+ # sys.exit(1)
# sys.exit()
@@ -48,3 +77,4 @@
# logger.info(f'result of NER for model {avalable_model} is {result1_str}')
# else:
# logger.info(f'module didn`t get NER data')
+
diff --git a/piper/__init__.py b/piper/__init__.py
new file mode 100644
index 0000000..7bcde55
--- /dev/null
+++ b/piper/__init__.py
@@ -0,0 +1,11 @@
+from piper.configurations import get_configuration
+from piper.imports import _set_import_functions
+
+configuration = get_configuration()
+
+if configuration.ignore_import_errors:
+ """
+ Piper activates safe import globally for piper work if configured True.
+ This ignores any import errors for safe imports in piper.base.executors
+ """
+ _set_import_functions(ignore=True)
diff --git a/piper/__main__.py b/piper/__main__.py
new file mode 100644
index 0000000..d626c4d
--- /dev/null
+++ b/piper/__main__.py
@@ -0,0 +1,34 @@
+import sys
+
+import click
+
+from piper.envs import ComposeEnv
+
+
+@click.command()
+@click.argument('type_command')
+@click.option(
+ '--env-type', '-e',
+ help='your current interpretation',
+)
+def main(type_command: str, env_type: str):
+ if env_type == 'compose':
+ if type_command == 'start':
+ print("type_command == 'start'")
+ with ComposeEnv() as env:
+ env.copy_struct_project()
+ env.create_files_for_compose()
+ env.start_compose()
+ elif type_command == 'stop':
+ print("type_command == 'stop'")
+ with ComposeEnv() as env:
+ env.stop_compose()
+ else:
+ raise NotImplementedError(f'{env_type} not released in this version pipertool')
+
+
+if __name__ == '__main__':
+ args = sys.argv
+ if "--help" in args or len(args) == 1:
+ print("CVE")
+ main()
diff --git a/piper/base/backend/templates/fast-api.j2 b/piper/base/backend/templates/fast-api.j2
index 6c8ae52..be9e0a1 100644
--- a/piper/base/backend/templates/fast-api.j2
+++ b/piper/base/backend/templates/fast-api.j2
@@ -1,8 +1,10 @@
import time
-from fastapi import FastAPI, Request, status
+from fastapi import FastAPI, Request, status, File, UploadFile
from piper.envs import CurrentEnv
+from pathlib import Path
+
{% for script_name in scripts.keys() %}
from {{ script_name }} import *
{% endfor %}
@@ -14,12 +16,23 @@ async def health_check():
return {"message": "health check"}
with CurrentEnv():
- service = {{ service_class }}( {% for k, v in service_kwargs.items() %} {{ k }}={{ v }}, {% endfor %} )
+ logger.info(f'CurrentEnv')
+ service = {{ service_class }}({% for k, v in service_kwargs.items() %} {{ k }}={{ v }}, {% endfor %})
+ logger.info(f'service {service}')
@app.post('/{{ function_name }}')
async def {{ function_name }}(
- request_model: {{ request_model }},
+ #request_model: {{ request_model }},
+ file: UploadFile = File(...)
):
- result = await service.{{ function_name }}(request_model)
+ data_b = await file.read()
+ suf = Path(file.filename).suffix.lower()
+ suf = suf[1:]
+ logger.info('{{ function_name }} POST request ')
+ result = await service.{{ function_name }}(data_b, suf)
- return result.dict()
\ No newline at end of file
+ logger.info(f'fast_api.j2 result is {result}')
+ try:
+ return result
+ except Exception as e:
+ logger.error(f'fast_api.j2 error while recognize {e}')
diff --git a/piper/base/backend/utils.py b/piper/base/backend/utils.py
index f4a4dd4..42e4693 100644
--- a/piper/base/backend/utils.py
+++ b/piper/base/backend/utils.py
@@ -1,6 +1,7 @@
-import jinja2
import os
+import jinja2
+
def render_fast_api_backend(**kwargs):
"""
diff --git a/piper/base/docker/__init__.py b/piper/base/docker/__init__.py
index 99e6e4b..5fc573d 100644
--- a/piper/base/docker/__init__.py
+++ b/piper/base/docker/__init__.py
@@ -1,4 +1,5 @@
import os
+
import jinja2
@@ -21,24 +22,30 @@ def render(self):
trim_blocks=True,
lstrip_blocks=True)
template = jinja_env.get_template(self.template_file)
- return template.render(cmd=self.cmd, python_docker_version=self.python_docker_version, run_command_lines=self.run_rows, post_install_lines=self.post_install_lines)
+ return template.render(cmd=self.cmd,
+ python_docker_version=self.python_docker_version,
+ run_command_lines=self.run_rows,
+ post_install_lines=self.post_install_lines)
-# class PythonTesseractImage:
+class TensorFlowImage:
-# def __init__(self, tag, python_docker_version, cmd):
-# self.tag = tag
-# self.python_docker_version = python_docker_version
-# self.cmd = cmd
+ def __init__(self, tag, python_docker_version, cmd, template_file, run_rows, post_install_lines):
+ self.tag = tag
+ self.python_docker_version = python_docker_version
+ self.cmd = cmd
+ self.template_file = template_file
+ self.run_rows = run_rows
+ self.post_install_lines = post_install_lines
+ def render(self):
+ """
+ Render docker template
+ """
+ template_dir = os.path.join(os.path.dirname(__file__), 'images')
+ jinja_env = jinja2.Environment(loader=jinja2.FileSystemLoader(template_dir),
+ trim_blocks=True,
+ lstrip_blocks=True)
+ template = jinja_env.get_template(self.template_file)
+ return template.render(cmd=self.cmd, ddocker_name=self.tag, ddocker_version=self.python_docker_version, run_command_lines=self.run_rows, post_install_lines=self.post_install_lines)
-# def render(self):
-# """
-# Render docker template
-# """
-# template_dir = os.path.join(os.path.dirname(__file__), 'images')
-# jinja_env = jinja2.Environment(loader=jinja2.FileSystemLoader(template_dir),
-# trim_blocks=True,
-# lstrip_blocks=True)
-# template = jinja_env.get_template('python-tesrct.j2')
-# return template.render(cmd=self.cmd, python_docker_version=self.python_docker_version, run_command_lines=self.run_command_lines)
\ No newline at end of file
diff --git a/piper/base/docker/images/default-general.j2 b/piper/base/docker/images/default-general.j2
new file mode 100755
index 0000000..1b0203c
--- /dev/null
+++ b/piper/base/docker/images/default-general.j2
@@ -0,0 +1,15 @@
+FROM {{ddocker_name}}:{{ ddocker_version }}
+
+{{ run_command_lines }}
+
+WORKDIR /app
+
+COPY requirements.txt ./requirements.txt
+RUN PYTHONPATH=/usr/bin/python3 pip3 install -r requirements.txt
+
+{{ post_install_lines }}
+
+COPY ./ ./
+RUN chmod +x ./run.sh
+
+ENTRYPOINT ["{{ cmd }}"]
\ No newline at end of file
diff --git a/piper/base/docker_compose/__init__.py b/piper/base/docker_compose/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/piper/base/docker_compose/compose_executors.py b/piper/base/docker_compose/compose_executors.py
new file mode 100644
index 0000000..b5c9b1f
--- /dev/null
+++ b/piper/base/docker_compose/compose_executors.py
@@ -0,0 +1,79 @@
+import inspect
+import subprocess
+from typing import Dict
+
+from piper.base.docker_compose.utils import ComposeServices
+from piper.configurations import get_configuration
+from piper.utils.logger_utils import logger
+
+
+def copy_piper(path: str):
+ cfg = get_configuration()
+ from distutils.dir_util import copy_tree
+ copy_tree(cfg.piper_path, f"{path}/piper")
+
+
+def copy_scripts(path: str, scripts: Dict[str, str]):
+ for script_name, script_path in scripts.items():
+ with open(f"{path}/{script_name}.py", "w") as output:
+ with open(script_path, "r") as current_file:
+ output.write(current_file.read())
+
+
+def write_requirements(path, requirements):
+ with open(f"{path}/requirements.txt", "w") as output:
+ output.write("\n".join(requirements))
+
+
+class ComposeExecutor:
+ requirements = ["gunicorn", "fastapi", "uvicorn", "aiohttp", "Jinja2", "pydantic", "pymilvus", "numpy", "loguru"]
+
+ def __init__(self):
+ logger.info('ComposeExecutor init with is_compose_env()')
+
+ cfg = get_configuration()
+ self.project_output_path = cfg.path
+
+ def scripts(self):
+ return {"service": inspect.getfile(self.__class__)}
+
+ def copy_struct_project(self):
+ copy_piper(self.project_output_path)
+ copy_scripts(self.project_output_path, self.scripts())
+
+ def create_files_for_compose(self, testing: bool = False):
+ logger.info('ComposeExecutor create_fast_api_files_venv()')
+
+ compose_service = ComposeServices(
+ name_path=self.project_output_path,
+ )
+
+ main_fastapi = compose_service.render_script_fastapi()
+ with open(f"{self.project_output_path}/main.py", "w") as output:
+ output.write(main_fastapi)
+
+ docker_compose = compose_service.render_compose_services()
+ with open(f"{self.project_output_path}/docker-compose.yaml", "w") as output:
+ output.write(docker_compose)
+
+ bash_start = compose_service.render_bash_start(testing=testing)
+ with open(f"{self.project_output_path}/bash-start.sh", "w") as output:
+ output.write(bash_start)
+
+ bash_stop = compose_service.render_bash_stop()
+ with open(f"{self.project_output_path}/bash-stop.sh", "w") as output:
+ output.write(bash_stop)
+
+ dockerfile = compose_service.render_dockerfile()
+ with open(f"{self.project_output_path}/Dockerfile", "w") as output:
+ output.write(dockerfile)
+
+ write_requirements(self.project_output_path, self.requirements)
+
+ def start_compose(self):
+ process_chmod_start = subprocess.run(f'chmod +x {self.project_output_path}bash-start.sh', shell=True)
+ process_run = subprocess.run(f'{self.project_output_path}bash-start.sh', shell=True)
+
+ def stop_compose(self):
+ process_chmod_stop = subprocess.run(f'chmod +x {self.project_output_path}bash-stop.sh', shell=True)
+ process_run = subprocess.run(f'{self.project_output_path}bash-stop.sh', shell=True)
diff --git a/piper/base/docker_compose/templates/bash-start-compose.j2 b/piper/base/docker_compose/templates/bash-start-compose.j2
new file mode 100644
index 0000000..a894e04
--- /dev/null
+++ b/piper/base/docker_compose/templates/bash-start-compose.j2
@@ -0,0 +1,3 @@
+#!/usr/bin/env bash
+
+sudo docker-compose -f {{ name_path }}docker-compose.yaml up --build {{ compose_flag }}
\ No newline at end of file
diff --git a/piper/base/docker_compose/templates/bash-stop-compose.j2 b/piper/base/docker_compose/templates/bash-stop-compose.j2
new file mode 100644
index 0000000..0aed3a9
--- /dev/null
+++ b/piper/base/docker_compose/templates/bash-stop-compose.j2
@@ -0,0 +1,3 @@
+#!/usr/bin/env bash
+
+sudo docker-compose -f {{ name_path }}docker-compose.yaml down
\ No newline at end of file
diff --git a/piper/base/docker_compose/templates/compose-services.j2 b/piper/base/docker_compose/templates/compose-services.j2
new file mode 100644
index 0000000..88b122b
--- /dev/null
+++ b/piper/base/docker_compose/templates/compose-services.j2
@@ -0,0 +1,60 @@
+version: '3'
+
+services:
+ etcd:
+ container_name: milvus-etcd
+ image: quay.io/coreos/etcd:v3.5.0
+ environment:
+ - ETCD_AUTO_COMPACTION_MODE=revision
+ - ETCD_AUTO_COMPACTION_RETENTION=1000
+ - ETCD_QUOTA_BACKEND_BYTES=4294967296
+ volumes:
+ - data_milvus:/etcd
+ command: etcd -advertise-client-urls=http://127.0.0.1:2379 -listen-client-urls http://0.0.0.0:2379 --data-dir /etcd
+
+ minio:
+ container_name: milvus-minio
+ image: minio/minio:RELEASE.2022-03-17T06-34-49Z
+ ports:
+ - "9001:9001"
+ environment:
+ MINIO_ACCESS_KEY: minioadmin
+ MINIO_SECRET_KEY: minioadmin
+ volumes:
+ - data_milvus:/minio_data
+ command: minio server /data --console-address ":9001"
+ healthcheck:
+ test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
+ interval: 30s
+ timeout: 20s
+ retries: 3
+
+ standalone:
+ container_name: milvus-standalone
+ image: milvusdb/milvus:v2.1.1
+ command: ["milvus", "run", "standalone"]
+ environment:
+ ETCD_ENDPOINTS: etcd:2379
+ MINIO_ADDRESS: minio:9000
+ volumes:
+ - data_milvus:/var/lib/milvus
+ ports:
+ - "19530:19530"
+ - "9091:9091"
+ depends_on:
+ - "etcd"
+ - "minio"
+
+ microservice:
+ build:
+ context: .
+ restart: always
+ ports:
+ - 7585:80
+ depends_on:
+ - "etcd"
+ - "minio"
+ - "standalone"
+
+volumes:
+ data_milvus:
\ No newline at end of file
diff --git a/piper/base/docker_compose/templates/dockerfile.j2 b/piper/base/docker_compose/templates/dockerfile.j2
new file mode 100644
index 0000000..5f9c1fd
--- /dev/null
+++ b/piper/base/docker_compose/templates/dockerfile.j2
@@ -0,0 +1,8 @@
+FROM tiangolo/uvicorn-gunicorn-fastapi:python3.8
+
+WORKDIR /app
+
+COPY requirements.txt .
+RUN pip install --no-cache-dir -r requirements.txt
+
+COPY main.py .
diff --git a/piper/base/docker_compose/templates/python-fastapi-milvus.j2 b/piper/base/docker_compose/templates/python-fastapi-milvus.j2
new file mode 100644
index 0000000..6f7b555
--- /dev/null
+++ b/piper/base/docker_compose/templates/python-fastapi-milvus.j2
@@ -0,0 +1,185 @@
+import time
+from typing import Union
+
+import numpy as np
+from fastapi import FastAPI, status
+from pymilvus import (Collection, CollectionSchema, DataType, FieldSchema,
+ connections, utility)
+
+app = FastAPI(
+ docs_url='/',
+ redoc_url=None,
+)
+
+
+# hello_milvus.py demonstrates the basic operations of PyMilvus, a Python SDK of Milvus.
+# 1. connect to Milvus
+# 2. create collection
+# 3. insert data
+# 4. create index
+# 5. search, query, and hybrid search on entities
+# 6. delete entities by PK
+# 7. drop collection
+
+
+fmt = "\n=== {:30} ===\n"
+search_latency_fmt = "search latency = {:.4f}s"
+num_entities, dim = 3000, 8
+
+#################################################################################
+# 1. connect to Milvus
+# Add a new connection alias `default` for Milvus server in `localhost:19530`
+# Actually the "default" alias is a built-in in PyMilvus.
+# If the address of Milvus is the same as `localhost:19530`, you can omit all
+# parameters and call the method as: `connections.connect()`.
+#
+# Note: the `using` parameter of the following methods is default to "default".
+print(fmt.format("start connecting to Milvus"))
+connections.connect("default", host="standalone", port="19530")
+
+
+@app.get("/health_check", status_code=200)
+def health_check():
+ return 'Success'
+
+
+@app.get("/has_collection")
+def has_collection(name_collect: str = "hello_milvus"):
+ has = utility.has_collection(name_collect)
+
+ return f"Does collection hello_milvus exist in Milvus: {has}"
+
+
+@app.get("/get_list_collections")
+def get_list_collections():
+ return {
+ 'list_collections': utility.list_collections()
+ }
+
+
+@app.get("/create_collection")
+def create_collection(name_collect: str = "hello_milvus"):
+ #################################################################################
+ # 2. create collection
+ # We're going to create a collection with 3 fields.
+ # +-+------------+------------+------------------+------------------------------+
+ # | | field name | field type | other attributes | field description |
+ # +-+------------+------------+------------------+------------------------------+
+ # |1| "pk" | VarChar | is_primary=True | "primary field" |
+ # | | | | auto_id=False | |
+ # +-+------------+------------+------------------+------------------------------+
+ # |2| "random" | Double | | "a double field" |
+ # +-+------------+------------+------------------+------------------------------+
+ # |3|"embeddings"| FloatVector| dim=8 | "float vector with dim 8" |
+ # +-+------------+------------+------------------+------------------------------+
+ fields = [
+ FieldSchema(name="pk", dtype=DataType.VARCHAR, is_primary=True, auto_id=False, max_length=100),
+ FieldSchema(name="random", dtype=DataType.DOUBLE),
+ FieldSchema(name="embeddings", dtype=DataType.FLOAT_VECTOR, dim=dim)
+ ]
+
+ schema = CollectionSchema(fields, f"{name_collect} is the simplest demo to introduce the APIs")
+ hello_milvus = Collection(name_collect, schema, consistency_level="Strong")
+
+ return fmt.format(f"Create collection {name_collect}")
+
+
+@app.get("/insert_data")
+def insert_data(name_collect: str = "hello_milvus"):
+ # 3. insert data
+ # We are going to insert 3000 rows of data into `hello_milvus`
+ # Data to be inserted must be organized in fields.
+ #
+ # The insert() method returns:
+ # - either automatically generated primary keys by Milvus if auto_id=True in the schema;
+ # - or the existing primary key field from the entities if auto_id=False in the schema.
+
+ collection = Collection(name_collect)
+ print(fmt.format("Start inserting entities"))
+ rng = np.random.default_rng(seed=19530)
+ entities = [
+ # provide the pk field because `auto_id` is set to False
+ [str(i) for i in range(num_entities)],
+ rng.random(num_entities).tolist(), # field random, only supports list
+ rng.random((num_entities, dim)), # field embeddings, supports numpy.ndarray and list
+ ]
+
+ insert_result = collection.insert(entities)
+
+    print(f"Number of entities in Milvus: {collection.num_entities}")  # check the num_entities
+
+ ################################################################################
+ # 4. create index
+ # We are going to create an IVF_FLAT index for hello_milvus collection.
+ # create_index() can only be applied to `FloatVector` and `BinaryVector` fields.
+ print(fmt.format("Start Creating index IVF_FLAT"))
+ index = {
+ "index_type": "IVF_FLAT",
+ "metric_type": "L2",
+ "params": {"nlist": 128},
+ }
+
+ collection.create_index("embeddings", index)
+
+ return 'Success'
+
+
+@app.get("/search_query")
+def search_query(name_collect: str = "hello_milvus"):
+ ################################################################################
+ # 5. search, query, and hybrid search
+ # After data were inserted into Milvus and indexed, you can perform:
+ # - search based on vector similarity
+ # - query based on scalar filtering(boolean, int, etc.)
+ # - hybrid search based on vector similarity and scalar filtering.
+ #
+
+ collection = Collection(name_collect)
+
+ # Before conducting a search or a query, you need to load the data in `hello_milvus` into memory.
+ print(fmt.format("Start loading"))
+ collection.load()
+
+ # -----------------------------------------------------------------------------
+ # search based on vector similarity
+ print(fmt.format("Start searching based on vector similarity"))
+ rng = np.random.default_rng(seed=19530)
+ entities = [
+ # provide the pk field because `auto_id` is set to False
+ [str(i) for i in range(num_entities)],
+ rng.random(num_entities).tolist(), # field random, only supports list
+ rng.random((num_entities, dim)), # field embeddings, supports numpy.ndarray and list
+ ]
+ vectors_to_search = entities[-1][-2:]
+ search_params = {
+ "metric_type": "L2",
+ "params": {"nprobe": 10},
+ }
+
+ start_time = time.time()
+ result = collection.search(vectors_to_search, "embeddings", search_params, limit=3, output_fields=["random"])
+ end_time = time.time()
+
+ result_list = []
+ for hits in result:
+ for hit in hits:
+ result_list.append(
+ {
+ "hit": hit,
+ "random_field": hit.entity.get('random'),
+ },
+ )
+ print(search_latency_fmt.format(end_time - start_time))
+
+ return {
+ 'result': result_list
+ }
+
+
+# 7. drop collection
+# Finally, drop the hello_milvus collection
+@app.get("/drop_collection")
+def drop_collection(name_collect: str = "hello_milvus"):
+ print(fmt.format("Drop collection `hello_milvus`"))
+ utility.drop_collection(name_collect)
+ return 'Success'
diff --git a/piper/base/docker_compose/utils.py b/piper/base/docker_compose/utils.py
new file mode 100644
index 0000000..11a2036
--- /dev/null
+++ b/piper/base/docker_compose/utils.py
@@ -0,0 +1,89 @@
+import os
+
+import jinja2
+
+from piper.utils.logger_utils import logger
+
+
+class ComposeServices:
+
+ def __init__(
+ self,
+ name_path: str,
+ ):
+ self.name_path = name_path
+
+ @staticmethod
+ def render_script_fastapi():
+ """
+ Render main file for fastapi
+ """
+ logger.info('Render main file for fastapi in compose services')
+
+ template_dir = os.path.join(os.path.dirname(__file__), 'templates')
+ jinja_env = jinja2.Environment(loader=jinja2.FileSystemLoader(template_dir),
+ trim_blocks=True,
+ lstrip_blocks=True)
+ template = jinja_env.get_template('python-fastapi-milvus.j2')
+ return template.render()
+
+ def render_bash_start(self, testing: bool = False):
+ """
+ Render bash script for bash_start
+ """
+ logger.info('Render bash script for bash_start')
+
+ template_dir = os.path.join(os.path.dirname(__file__), 'templates')
+ jinja_env = jinja2.Environment(loader=jinja2.FileSystemLoader(template_dir),
+ trim_blocks=True,
+ lstrip_blocks=True)
+ template = jinja_env.get_template('bash-start-compose.j2')
+
+ compose_flag = '' if testing else '-d'
+ return template.render(
+ name_path=self.name_path,
+ compose_flag=compose_flag,
+ )
+
+ def render_bash_stop(self):
+ """
+ Render bash script for bash_stop
+ """
+ logger.info('Render bash script for bash_stop')
+
+ template_dir = os.path.join(os.path.dirname(__file__), 'templates')
+ jinja_env = jinja2.Environment(loader=jinja2.FileSystemLoader(template_dir),
+ trim_blocks=True,
+ lstrip_blocks=True)
+ template = jinja_env.get_template('bash-stop-compose.j2')
+ return template.render(
+ name_path=self.name_path,
+ )
+
+ @staticmethod
+ def render_compose_services():
+ """
+ Render script for compose_services
+ """
+ logger.info('Render script for compose_services')
+
+ template_dir = os.path.join(os.path.dirname(__file__), 'templates')
+ jinja_env = jinja2.Environment(loader=jinja2.FileSystemLoader(template_dir),
+ trim_blocks=True,
+ lstrip_blocks=True)
+ template = jinja_env.get_template('compose-services.j2')
+ return template.render()
+
+ @staticmethod
+ def render_dockerfile():
+ """
+ Render dockerfile
+ """
+ logger.info('Render dockerfile')
+
+ template_dir = os.path.join(os.path.dirname(__file__), 'templates')
+ jinja_env = jinja2.Environment(loader=jinja2.FileSystemLoader(template_dir),
+ trim_blocks=True,
+ lstrip_blocks=True)
+ template = jinja_env.get_template('dockerfile.j2')
+ return template.render()
diff --git a/piper/base/executors/__init__.py b/piper/base/executors/__init__.py
new file mode 100644
index 0000000..be63ad2
--- /dev/null
+++ b/piper/base/executors/__init__.py
@@ -0,0 +1,3 @@
+from piper.base.executors._base_executor import BaseExecutor
+from piper.base.executors.http import HTTPExecutor
+from piper.base.executors.fastapi import FastAPIExecutor, FastAPITesseractExecutor
diff --git a/piper/base/executors/_base_executor.py b/piper/base/executors/_base_executor.py
new file mode 100644
index 0000000..2bd5b8b
--- /dev/null
+++ b/piper/base/executors/_base_executor.py
@@ -0,0 +1,56 @@
+from abc import abstractmethod
+
+from piper.envs import get_env, is_current_env, Env
+from piper.utils.logger_utils import logger
+
+
+class BaseExecutor:
+ """
+    This class is the main executor, which you need to inherit to work with piper normally.
+    This is sync by default, but you can change to async and use __call__ with await.
+ To create your child Executor just implement run for sync behavior or exec for async (set is_async)
+ or implement both run and exec
+
+ You can use prepared Executors like HTTPExecutor. Usually you don't need to control behavior for every environment.
+ However you can do that properly for your custom Executor :
+
+ class YourCustomExecutor(BaseExecutor):
+ def run():
+ x + x
+ def docker_run():
+ ... # your custom logic for docker env
+ def compose_run():
+ ... # your custom logic for compose env
+ def custom_env_run():
+ ... # for you own env
+ """
+
+ is_async: bool = False
+
+ @abstractmethod
+ def run(self, *args, **kwargs):
+ raise NotImplementedError(f"run method not implemented in Executor {self}")
+
+ @abstractmethod
+ async def exec(self, *args, **kwargs):
+ raise NotImplementedError(f"exec method not implemented in Executor {self}")
+
+ def env_run(self, env: Env, *args, **kwargs):
+ if is_current_env():
+ return self.run(*args, **kwargs)
+ else:
+ env_run_name = f"{env.name}_run"
+ return getattr(self, env_run_name)(*args, **kwargs)
+
+ async def env_exec(self, env: Env, *args, **kwargs):
+ if is_current_env():
+ return await self.exec(*args, **kwargs)
+ else:
+ env_run_name = f"{env.name}_exec"
+ return await getattr(self, env_run_name)(*args, **kwargs)
+
+ def __call__(self, *args, **kwargs):
+ if self.is_async:
+ return self.env_exec(get_env(), *args, **kwargs)
+ else:
+ return self.env_run(get_env(), *args, **kwargs)
diff --git a/piper/base/executors.py b/piper/base/executors/fastapi.py
similarity index 51%
rename from piper/base/executors.py
rename to piper/base/executors/fastapi.py
index ee6ab47..55ee7b6 100644
--- a/piper/base/executors.py
+++ b/piper/base/executors/fastapi.py
@@ -1,57 +1,29 @@
-from abc import abstractmethod, ABC
-from distutils.command.config import config
-import os
-import time
-from typing import Dict
-import inspect
-
-import aiohttp
-from loguru import logger
-import docker
-from pydantic import BaseModel #, BytesObject, ListOfStringsObject
-
-from piper.base.docker import PythonImage
# from piper.base.docker import PythonTesseractImage
-from piper.base.backend.utils import render_fast_api_backend, render_fast_api_tsrct_backend
-from piper.envs import is_docker_env, is_current_env, get_env
+from piper.base.backend.utils import (render_fast_api_backend,
+ render_fast_api_tsrct_backend)
+from piper.base.docker import PythonImage, TensorFlowImage
from piper.configurations import get_configuration
-from piper.utils import docker_utils as du
+from piper.envs import get_env, is_current_env, is_docker_env, Env
+from piper.utils import docker_utils
+from piper.utils.logger_utils import logger
+from piper.base.executors import HTTPExecutor
-import requests
+import asyncio
+import inspect
import sys
+import time
+from abc import ABC, abstractmethod
+from typing import Dict
+from distutils.dir_util import copy_tree
-class BaseExecutor:
- pass
-
-
-class LocalExecutor:
- pass
-
-
-def is_known(obj):
- basic = obj.__class__.__name__ in {'dict', 'list', 'tuple', 'str', 'int', 'float', 'bool'}
- models = isinstance(obj, (BaseModel,))
- return basic or models
-
-
-def prepare(obj):
- if isinstance(obj, (BaseModel,)):
- return obj.dict()
- return obj
-
-
-def inputs_to_dict(*args, **kwargs):
- from_args = {}
- for arg in args:
- if is_known(arg):
- from_args.update(prepare(arg))
- from_kwargs = {k: prepare(v) for k, v in kwargs.items() if is_known(v)}
- from_args.update(from_kwargs)
- return from_args
+# import aiohttp
+import docker
+import requests
+from pydantic import BaseModel # , BytesObject, ListOfStringsObject
def add_packages_to_install(packages_list):
- row = f'RUN apt install -y {" ".join(packages_list)} \n'
+ row = f'RUN apt install -y {" ".join(packages_list)} \n'
return row
@@ -59,36 +31,9 @@ def add_row(row):
return f'{row} \n'
-class HTTPExecutor(BaseExecutor):
-
- def __init__(self, host: str, port: int, base_handler: str):
- self.host = host
- self.port = port
-
- @abstractmethod
- async def run(self, *args, **kwargs):
- pass
-
- async def __call__(self, *args, **kwargs):
- logger.info(f'get_env() {get_env()}')
- logger.info(f'is_current_env() {is_current_env()}')
- if is_current_env():
- return await self.run(*args, **kwargs)
- else:
- function = "run"
- request_dict = inputs_to_dict(*args, **kwargs)
- logger.info(f'request_dict is {request_dict}')
- async with aiohttp.ClientSession() as session:
- url = f'http://{self.host}:{self.port}/{function}'
- logger.info(f'run function with url {url} and data {request_dict}')
- async with session.post(url, json=request_dict) as resp:
- return await resp.json()
-
-
def copy_piper(path: str):
cfg = get_configuration()
- from distutils.dir_util import copy_tree
- copy_tree(cfg.piper_path, f"{path}/piper")
+ copy_tree(cfg.piper_path, f"{path}piper")
def copy_scripts(path: str, scripts: Dict[str, str]):
@@ -103,39 +48,19 @@ def write_requirements(path, requirements):
output.write("\n".join(requirements))
-def build_image(path: str, docker_image):
- client = docker.DockerClient(base_url='unix://var/run/docker.sock')
+def write_dockerfile(path, docker_image):
image = docker_image.render()
with open(f"{path}/Dockerfile", "w") as output:
output.write(image)
- image, logs = client.images.build(path=path,
- tag=docker_image.tag,
- quiet=False,
- timeout=20)
- for log in logs:
- logger.info(f'executor build_image: {log}')
- logger.info(f'image is {image}')
-
-
-def run_container(image: str, ports: Dict[int, int]):
- client = docker.DockerClient(base_url='unix://var/run/docker.sock')
- container = client.containers.run(image, detach=True, ports=ports)
- for log in container.logs():
- logger.info(f'executor run_container: {log}')
- logger.info(f'container is {container}')
- time.sleep(10)
-
- return container
-
def wait_for_fast_api_app_start(host, external_port, wait_on_iter, n_iters):
- '''
+ """
wait for fast api app will be loaded
- external_port -
+    external_port - external port to send health_check requests to
wait_on_iter - seconds between health_check requests
n_iters - total health_check requests
- '''
+ """
logger.info('waiting for FastAPI app start')
i = 0
while True:
@@ -145,6 +70,7 @@ def wait_for_fast_api_app_start(host, external_port, wait_on_iter, n_iters):
if r.status_code == 200:
break
except Exception as e:
+ logger.error(f"Exception while starting FastAPI app {e}")
time.sleep(wait_on_iter)
if i == n_iters:
@@ -152,6 +78,130 @@ def wait_for_fast_api_app_start(host, external_port, wait_on_iter, n_iters):
sys.exit()
i += 1
+
+class FastAPIFaceDetectorExecutor(HTTPExecutor):
+    # basic requirements
+ requirements = ["gunicorn", "fastapi", "uvicorn", "aiohttp", "docker", "Jinja2", "pydantic", "loguru", "numpy", "opencv-python", "python-multipart", ]
+
+    # executor specific requirements
+ requirements.extend(
+ [
+ # 'python3-opencv'
+ 'tensorflow',
+ 'mtcnn',
+ ]
+ )
+
+ # basic packages
+ packages_list = ['apt-utils', 'tree', 'cmake', 'mc']
+
+ # executor specific packages
+ packages_list.extend(
+ [
+ 'libgl1',
+ 'ffmpeg',
+ 'libsm6',
+ 'libxext6',
+ ]
+ )
+
+ base_handler = "recognize"
+
+ def __init__(self, port: int = 8080, **service_kwargs):
+ self.container = None
+        # self.image_tag = 'piper:latest'
+ self.image_tag = 'tensorflow/tensorflow'
+ self.container_name = "piper_FastAPI_FaceDetector"
+
+ if is_docker_env():
+ docker_client = docker.DockerClient(base_url='unix://var/run/docker.sock')
+ cfg = get_configuration()
+ project_output_path = cfg.path
+
+ copy_piper(project_output_path)
+ copy_scripts(project_output_path, self.scripts())
+
+ run_rows = ''
+ run_rows += add_row('RUN apt update && apt install -y apt-transport-https')
+ run_rows += add_row('RUN apt install -y software-properties-common')
+ run_rows += add_packages_to_install(self.packages_list)
+ run_rows += add_row('RUN pip3 install --upgrade pip')
+
+ post_install_lines = ""
+
+ docker_image = TensorFlowImage(self.image_tag, 'latest-gpu-jupyter', cmd=f"./run.sh", template_file='default-general.j2', run_rows=run_rows, post_install_lines=post_install_lines)
+ logger.info('Docker file created')
+
+ write_requirements(project_output_path, self.requirements)
+ logger.info('python requirements file created')
+
+ write_dockerfile(project_output_path, docker_image)
+
+ self.create_fast_api_files(project_output_path, **service_kwargs)
+
+ logger.info('build_image')
+ docker_utils.build_image(project_output_path, docker_image.tag)
+ logger.info('image builded')
+
+ # create and run docker container
+            # if container exists it will be recreated!
+ logger.info('create image and container started')
+ container = docker_utils.create_image_and_container_by_dockerfile(
+ docker_client,
+ project_output_path,
+ self.image_tag,
+ self.container_name,
+ port
+ )
+
+ logger.info('waiting for FastApi service start')
+ if container:
+ output = container.attach(stdout=True, stderr=True, stream=False, logs=True)
+ for line in output:
+ logger.info(str(line))
+                    # TODO: test FastAPI errors in another way
+ if 'Traceback' in str(line):
+ logger.error('FastAPI can`t start')
+ sys.exit()
+ # logger.info(container.stats(decode=False, stream=False))
+
+ wait_for_fast_api_app_start('localhost', cfg.docker_app_port, cfg.wait_on_iter, cfg.n_iters)
+ else:
+ # TODO: Local ENVIRONMENT checks
+ pass
+
+ super().__init__('localhost', port, self.base_handler)
+
+
+ def rm_container(self):
+ if self.container:
+ self.container.remove(force=True)
+
+ def scripts(self):
+ return {"service": inspect.getfile(self.__class__)}
+
+ def create_fast_api_files(self, path: str, **service_kwargs):
+ cfg = get_configuration()
+
+ # TODO add support more than one functions
+ backend = render_fast_api_backend(
+ service_class=self.__class__.__name__,
+ service_kwargs=dict(service_kwargs),
+ scripts=self.scripts(),
+ function_name=self.base_handler,
+ request_model="BytesObject",
+ response_model="ListOfStringsObject"
+ )
+
+ with open(f"{path}/main.py", "w") as output:
+ output.write(backend)
+
+ gunicorn = "#!/bin/bash \n" \
+ f"gunicorn -b 0.0.0.0:8080 --workers {cfg.n_gunicorn_workers} main:app --worker-class uvicorn.workers.UvicornWorker --preload --timeout 240"
+ with open(f"{path}/run.sh", "w") as output:
+ output.write(gunicorn)
+
+
class FastAPIExecutor(HTTPExecutor):
requirements = ["gunicorn", "fastapi", "uvicorn", "aiohttp", "docker", "Jinja2", "pydantic", "loguru"]
base_handler = "run"
@@ -159,7 +209,8 @@ class FastAPIExecutor(HTTPExecutor):
def __init__(self, port: int = 8080, **service_kwargs):
self.container = None
self.image_tag = 'piper:latest'
- self.container_name = "piper_FastAPI"
+ self.id = hash(self)
+ self.container_name = f"piper_FastAPI_{self.id}"
if is_docker_env():
docker_client = docker.DockerClient(base_url='unix://var/run/docker.sock')
@@ -168,12 +219,15 @@ def __init__(self, port: int = 8080, **service_kwargs):
copy_piper(project_output_path)
copy_scripts(project_output_path, self.scripts())
- # build_image(project_output_path, docker_image)
self.create_fast_api_files(project_output_path, **service_kwargs)
+ docker_image = PythonImage(self.image_tag, "3.9", cmd=f"./run.sh", template_file='default-python.j2',
+ run_rows="", post_install_lines="")
+ docker_utils.build_image(project_output_path, docker_image)
+
# create and run docker container
# if container exits it will be recreated!
- du.create_image_and_container_by_dockerfile(
+ docker_utils.create_image_and_container_by_dockerfile(
docker_client,
project_output_path,
self.image_tag,
@@ -188,6 +242,14 @@ def __init__(self, port: int = 8080, **service_kwargs):
super().__init__('localhost', port, self.base_handler)
+ async def aio_call(self, *args, **kwargs):
+ return await super().__call__(*args, ** kwargs)
+
+ def __call__(self, *args, **kwargs):
+ loop = asyncio.get_event_loop()
+ result = loop.run_until_complete(self.aio_call(*args, **kwargs))
+ return result
+
def rm_container(self):
if self.container:
self.container.remove(force=True)
@@ -204,14 +266,13 @@ def create_fast_api_files(self, path: str, **service_kwargs):
function_name=self.base_handler,
request_model="StringValue",
response_model="StringValue")
-
with open(f"{path}/main.py", "w") as output:
output.write(backend)
write_requirements(path, self.requirements)
gunicorn = "#!/bin/bash \n" \
- f"gunicorn -b 0.0.0.0:8080 --workers {cfg.n_gunicorn_workers} main:app --worker-class uvicorn.workers.UvicornWorker --preload --timeout 120 --reload=True"
+ f"gunicorn -b 0.0.0.0:8080 --workers {cfg.n_gunicorn_workers} main:app --worker-class uvicorn.workers.UvicornWorker --preload --timeout 120"
with open(f"{path}/run.sh", "w") as output:
output.write(gunicorn)
@@ -240,19 +301,23 @@ def __init__(self, port: int = 8080, **service_kwargs):
run_rows += add_packages_to_install(self.packages_list)
run_rows += add_row('RUN pip3 install --upgrade pip')
- # скачивает сюда /usr/local/lib/python3.9/site-packages/en_core_web_sm
- # post_install_lines = f'RUN python3 -m spacy download {cfg.spacy_model} --data-path {cfg.model_path}'
+ # download to /usr/local/lib/python3.9/site-packages/en_core_web_sm
+ # post_install_lines = f'RUN python3 -m spacy download {",".join(cfg.spacy_models)} --data-path {cfg.model_path}'
post_install_lines = ""
- # docker_image = PythonTesseractImage(self.image_tag, "3.9", cmd=f"./run.sh")
+ write_requirements(project_output_path, self.requirements)
+ logger.info('python requirements file created')
+
docker_image = PythonImage(self.image_tag, "3.9", cmd=f"./run.sh", template_file='default-python.j2', run_rows=run_rows, post_install_lines=post_install_lines)
- build_image(project_output_path, docker_image)
+ write_dockerfile(project_output_path, docker_image)
self.create_fast_api_files(project_output_path, **service_kwargs)
+ docker_utils.build_image(project_output_path, docker_image.tag)
+
# create and run docker container
# if container exits it will be recreated!
- du.create_image_and_container_by_dockerfile(
+ docker_utils.create_image_and_container_by_dockerfile(
docker_client,
project_output_path,
self.image_tag,
@@ -267,10 +332,6 @@ def __init__(self, port: int = 8080, **service_kwargs):
super().__init__('localhost', port, self.base_handler)
- def rm_container(self):
- if self.container:
- self.container.remove(force=True)
-
def scripts(self):
return {"service": inspect.getfile(self.__class__)}
@@ -278,21 +339,20 @@ def create_fast_api_files(self, path: str, **service_kwargs):
cfg = get_configuration()
# TODO add support more than one functions
- backend = render_fast_api_tsrct_backend(
+ backend = render_fast_api_backend(
service_class=self.__class__.__name__,
service_kwargs=dict(service_kwargs),
scripts=self.scripts(),
function_name=self.base_handler,
- # request_model="BytesObject",
- # response_model="ListOfStringsObject"
+ request_model="BytesObject",
+ response_model="ListOfStringsObject"
)
with open(f"{path}/main.py", "w") as output:
output.write(backend)
- write_requirements(path, self.requirements)
-
gunicorn = "#!/bin/bash \n" \
- f"gunicorn -b 0.0.0.0:8080 --workers {cfg.n_gunicorn_workers} main:app --worker-class uvicorn.workers.UvicornWorker --preload --timeout 120"
+ f"gunicorn -b 0.0.0.0:8080 --workers {cfg.n_gunicorn_workers} main:app --worker-class uvicorn.workers.UvicornWorker --preload --timeout 240"
with open(f"{path}/run.sh", "w") as output:
output.write(gunicorn)
+
diff --git a/piper/base/executors/http.py b/piper/base/executors/http.py
new file mode 100644
index 0000000..f15e90d
--- /dev/null
+++ b/piper/base/executors/http.py
@@ -0,0 +1,56 @@
+from abc import abstractmethod
+
+import aiohttp
+
+from piper.envs import get_env, is_current_env
+from piper.utils.logger_utils import logger
+from piper.base.executors import BaseExecutor
+from pydantic import BaseModel
+
+
+def is_known(obj):
+ basic = obj.__class__.__name__ in {'dict', 'list', 'tuple', 'str', 'int', 'float', 'bool'}
+ models = isinstance(obj, (BaseModel,))
+ return basic or models
+
+
+def prepare(obj):
+ if isinstance(obj, (BaseModel,)):
+ return obj.dict()
+ return obj
+
+
+def inputs_to_dict(*args, **kwargs):
+ from_args = {}
+ for arg in args:
+ if is_known(arg):
+ from_args.update(prepare(arg))
+ from_kwargs = {k: prepare(v) for k, v in kwargs.items() if is_known(v)}
+ from_args.update(from_kwargs)
+ return from_args
+
+
+class HTTPExecutor(BaseExecutor):
+
+ def __init__(self, host: str, port: int, base_handler: str):
+ self.host = host
+ self.port = port
+
+ @abstractmethod
+ async def run(self, *args, **kwargs):
+ pass
+
+ async def __call__(self, *args, **kwargs):
+ logger.info(f'get_env() {get_env()}')
+ logger.info(f'is_current_env() {is_current_env()}')
+ if is_current_env():
+ return await self.run(*args, **kwargs)
+ else:
+ function = "run"
+ request_dict = inputs_to_dict(*args, **kwargs)
+ logger.info(f'request_dict is {request_dict}')
+ async with aiohttp.ClientSession() as session:
+ url = f'http://{self.host}:{self.port}/{function}'
+ logger.info(f'run function with url {url} and data {request_dict}')
+ async with session.post(url, json=request_dict) as resp:
+ return await resp.json()
\ No newline at end of file
diff --git a/piper/base/virtualenv/__init__.py b/piper/base/virtualenv/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/piper/base/virtualenv/templates/bash-create-tests.j2 b/piper/base/virtualenv/templates/bash-create-tests.j2
new file mode 100644
index 0000000..6cac142
--- /dev/null
+++ b/piper/base/virtualenv/templates/bash-create-tests.j2
@@ -0,0 +1,6 @@
+#!/usr/bin/env bash
+
+source {{ name_venv }}/bin/activate
+pip3 install pytest
+
+python3 -m pytest
\ No newline at end of file
diff --git a/piper/base/virtualenv/templates/bash-create-venv.j2 b/piper/base/virtualenv/templates/bash-create-venv.j2
new file mode 100644
index 0000000..6773b49
--- /dev/null
+++ b/piper/base/virtualenv/templates/bash-create-venv.j2
@@ -0,0 +1,12 @@
+#!/usr/bin/env bash
+
+NAMEVENV={{ name_path }}{{ name_venv }}
+
+pip3 install virtualenv
+
+python3 -m virtualenv $NAMEVENV
+source $NAMEVENV/bin/activate
+
+pip3 install -r {{ name_path }}requirements.txt
+
+python3 {{ name_path }}main.py {{ number }}
diff --git a/piper/base/virtualenv/templates/python-script-tests.j2 b/piper/base/virtualenv/templates/python-script-tests.j2
new file mode 100644
index 0000000..6276b5c
--- /dev/null
+++ b/piper/base/virtualenv/templates/python-script-tests.j2
@@ -0,0 +1,8 @@
+import pytest
+
+from piper_new_out.main import sum_factorial
+
+
+def test_base_func_from_main():
+ res_from_main = sum_factorial({{ number }})
+ assert res_from_main == 4037913
diff --git a/piper/base/virtualenv/templates/python-script-venv.j2 b/piper/base/virtualenv/templates/python-script-venv.j2
new file mode 100644
index 0000000..f45a43c
--- /dev/null
+++ b/piper/base/virtualenv/templates/python-script-venv.j2
@@ -0,0 +1,20 @@
+import argparse
+
+
+def sum_factorial(number: int) -> int:
+ partial_factorial = 1
+ partial_sum = 0
+ for i in range(1, number + 1):
+ partial_factorial *= i
+ partial_sum += partial_factorial
+ return partial_sum
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser()
+ parser.add_argument("number", type=int)
+ args = parser.parse_args()
+ if args.number:
+ print(f'Sum of factorial to {args.number} = {sum_factorial(args.number)}')
+ else:
+ print('No positional arguments in cli')
diff --git a/piper/base/virtualenv/utils.py b/piper/base/virtualenv/utils.py
new file mode 100644
index 0000000..d77d84b
--- /dev/null
+++ b/piper/base/virtualenv/utils.py
@@ -0,0 +1,79 @@
+import os
+
+import jinja2
+
+from piper.utils.logger_utils import logger
+
+
+class VenvPython:
+
+ def __init__(
+ self,
+ name_path: str,
+ name_venv: str,
+ number: int,
+ ):
+ self.name_path = name_path
+ self.name_venv = name_venv
+ self.number = number
+
+ def render_venv_bash(self):
+ """
+ Render bash script for create and activate venv
+ """
+ logger.info('Render bash script for create and activate venv')
+
+ template_dir = os.path.join(os.path.dirname(__file__), 'templates')
+ jinja_env = jinja2.Environment(loader=jinja2.FileSystemLoader(template_dir),
+ trim_blocks=True,
+ lstrip_blocks=True)
+ template = jinja_env.get_template('bash-create-venv.j2')
+ return template.render(
+ name_path=self.name_path,
+ name_venv=self.name_venv,
+ number=self.number,
+ )
+
+ @staticmethod
+ def render_venv_python():
+ """
+ Render main file for virtual env logic
+ """
+ logger.info('Render main file for virtual env logic')
+
+ template_dir = os.path.join(os.path.dirname(__file__), 'templates')
+ jinja_env = jinja2.Environment(loader=jinja2.FileSystemLoader(template_dir),
+ trim_blocks=True,
+ lstrip_blocks=True)
+ template = jinja_env.get_template('python-script-venv.j2')
+ return template.render()
+
+ def render_tests_bash(self):
+ """
+ Render bash script for create and activate venv
+ """
+ logger.info('Render bash script for create and activate venv')
+
+ template_dir = os.path.join(os.path.dirname(__file__), 'templates')
+ jinja_env = jinja2.Environment(loader=jinja2.FileSystemLoader(template_dir),
+ trim_blocks=True,
+ lstrip_blocks=True)
+ template = jinja_env.get_template('bash-create-tests.j2')
+ return template.render(
+ name_venv=self.name_venv,
+ )
+
+ def render_tests_python(self):
+ """
+ Render bash script for create and activate venv
+ """
+ logger.info('Render bash script for create and activate venv')
+
+ template_dir = os.path.join(os.path.dirname(__file__), 'templates')
+ jinja_env = jinja2.Environment(loader=jinja2.FileSystemLoader(template_dir),
+ trim_blocks=True,
+ lstrip_blocks=True)
+ template = jinja_env.get_template('python-script-tests.j2')
+ return template.render(
+ number=self.number,
+ )
diff --git a/piper/base/virtualenv/venv_executors.py b/piper/base/virtualenv/venv_executors.py
new file mode 100644
index 0000000..1bef436
--- /dev/null
+++ b/piper/base/virtualenv/venv_executors.py
@@ -0,0 +1,94 @@
+import inspect
+import os
+import subprocess
+from typing import Dict
+
+from piper.base.virtualenv.utils import VenvPython
+from piper.configurations import get_configuration
+from piper.utils.logger_utils import logger
+
+
+def copy_piper(path: str):
+ cfg = get_configuration()
+ from distutils.dir_util import copy_tree
+ copy_tree(cfg.piper_path, f"{path}/piper")
+
+
+def copy_scripts(path: str, scripts: Dict[str, str]):
+ for script_name, script_path in scripts.items():
+ with open(f"{path}/{script_name}.py", "w") as output:
+ with open(script_path, "r") as current_file:
+ output.write(current_file.read())
+
+
+def write_requirements(path, requirements):
+ with open(f"{path}/requirements.txt", "w") as output:
+ output.write("\n".join(requirements))
+
+
+class VirtualEnvExecutor:
+ requirements = ["gunicorn", "fastapi", "uvicorn", "aiohttp", "docker", "Jinja2", "pydantic", "loguru"]
+
+ def __init__(self):
+ logger.info('VirtualEnvExecutor init with is_virtual_env()')
+
+ cfg = get_configuration()
+ self.project_output_path = cfg.path
+ self.name_venv = cfg.name_venv
+ self.number = cfg.number
+
+ def scripts(self):
+ return {"service": inspect.getfile(self.__class__)}
+
+ def copy_struct_project(self):
+ copy_piper(self.project_output_path)
+ copy_scripts(self.project_output_path, self.scripts())
+
+ def create_files_for_venv(self):
+ logger.info('VirtualEnvExecutor create_fast_api_files_venv()')
+
+ venv_python_image = VenvPython(
+ name_path=self.project_output_path,
+ name_venv=self.name_venv,
+ number=self.number,
+ )
+
+ venv_main = venv_python_image.render_venv_python()
+ with open(f"{self.project_output_path}/main.py", "w") as output:
+ output.write(venv_main)
+
+ venv_bash = venv_python_image.render_venv_bash()
+ with open(f"{self.project_output_path}/create_venv.sh", "w") as output:
+ output.write(venv_bash)
+
+ write_requirements(self.project_output_path, self.requirements)
+
+ process_chmod = subprocess.run(f'chmod +x {self.project_output_path}create_venv.sh', shell=True)
+ process_run = subprocess.run(f'{self.project_output_path}create_venv.sh', shell=True)
+
+ def create_files_for_tests(self):
+ logger.info('VirtualEnvExecutor create_files_for_tests()')
+
+ with open(f"{self.project_output_path}/__init__.py", "w") as output:
+ pass
+
+ tests_directory = f"{self.project_output_path}/tests"
+ if not os.path.exists(tests_directory):
+ os.makedirs(tests_directory)
+
+ with open(f"{self.project_output_path}/tests/__init__.py", "w") as output:
+ pass
+
+ venv_python_image = VenvPython(
+ name_path=self.project_output_path,
+ name_venv=self.name_venv,
+ number=self.number,
+ )
+
+ test_main = venv_python_image.render_tests_python()
+ with open(f"{self.project_output_path}/tests/test_main.py", "w") as output:
+ output.write(test_main)
+
+ test_bash = venv_python_image.render_tests_bash()
+ with open(f"{self.project_output_path}/test_venv.sh", "w") as output:
+ output.write(test_bash)
diff --git a/piper/configurations.py b/piper/configurations.py
index a5dd151..131a5dd 100644
--- a/piper/configurations.py
+++ b/piper/configurations.py
@@ -1,11 +1,19 @@
+import time
+
+
class Configuration:
- path = "/Users/olegsokolov/PycharmProjects/piper/applications"
- path = "/home/pavel/repo/piper_new_out/"
- test_path = "/home/pavel/repo/piper_test_out/"
- piper_path = "piper"
- default_env = "docker"
- docker_app_port = 8788
- env = None
+ path: str = f"./applications/piper_project_{time.time_ns()}/"
+ test_path: str = f"./applications/piper_project_{time.time_ns()}/"
+ piper_path: str = "piper"
+ default_env: str = "compose"
+ docker_app_port: int = 8788
+
+ name_venv: str = "venv_test"
+ number: int = 10
+
+ env: str = None
+ ignore_import_errors: bool = True
+ safe_import_activated: bool = False
# start time and counter
wait_on_iter = 0.5
@@ -13,7 +21,7 @@ class Configuration:
# docker start time and counter
docker_wait_on_iter = 0.5
- docker_n_iters = 20
+ docker_n_iters = 20
n_gunicorn_workers = 1
@@ -54,8 +62,6 @@ class Configuration:
ts_config_row = r'--oem 1 --psm 11'
ts_config = {'ts_lang': 'eng', 'ts_config_row': r'--oem 1 --psm 11'}
-
-
# models and where to find them
spacy_models = set(
[
@@ -66,5 +72,6 @@ class Configuration:
)
model_path = '/app/models'
+
def get_configuration():
return Configuration
diff --git a/piper/envs/__init__.py b/piper/envs/__init__.py
index de363d3..99b1179 100644
--- a/piper/envs/__init__.py
+++ b/piper/envs/__init__.py
@@ -1,12 +1,22 @@
+from piper.base.docker_compose.compose_executors import ComposeExecutor
+from piper.base.virtualenv.venv_executors import VirtualEnvExecutor
from piper.configurations import get_configuration
+from piper.utils.logger_utils import logger
cfg = get_configuration()
def init_default_env():
- # INITIALIZE ENVIRONMENT FROM CONFIGURATION
+ """
+    This method initializes the default environment using the string name from the configuration.
+ CurrentEnv means default python where piper is installed.
+ """
if cfg.default_env == "docker":
set_env(DockerEnv())
+ elif cfg.default_env == "virtualenv":
+ set_env(VirtualEnv())
+ elif cfg.default_env == "compose":
+ set_env(ComposeEnv())
else:
set_env(CurrentEnv())
@@ -22,34 +32,66 @@ def set_env(env):
cfg.env = env
-class DockerEnv:
+class Env:
+ name = "no_env"
- def __init__(self):
- pass
+ _subclasses = []
def __enter__(self):
- print("Entering DockerEnv")
+ logger.info(f"Entering Env: {self.__class__.__name__}")
self._old_environment = get_env()
set_env(self)
def __exit__(self, *args, **kws):
- print("Exiting DockerEnv")
+ logger.info(f"Exit Env: {self.__class__.__name__}")
set_env(self._old_environment)
+ @classmethod
+ def get_all_envs(cls):
+ return list(cls._subclasses)
+
+ def __init_subclass__(cls):
+ Env._subclasses.append(cls)
+
-class CurrentEnv:
+class DockerEnv(Env):
+ name = "docker"
def __init__(self):
pass
+
+class CurrentEnv(Env):
+ name = "current_env"
+
+ def __init__(self):
+ pass
+
+
+class VirtualEnv(Env):
+ name = "virtualenv"
+
+ def __init__(self):
+ self.__resource = VirtualEnvExecutor()
+
def __enter__(self):
- print("Entering CurrentEnv")
- self._old_environment = get_env()
- set_env(self)
+ super().__enter__()
+ return self.__resource
+
+
+class ComposeEnv(Env):
+ name = "compose"
+
+ def __init__(self):
+ self.__resource = ComposeExecutor()
+
+ def __enter__(self):
+ super().__enter__()
+ return self.__resource
def __exit__(self, *args, **kws):
- print("Exiting CurrentEnv")
- set_env(self._old_environment)
+ super().__exit__(* args, ** kws)
+ # self.__resource.stop_compose()
def is_current_env():
@@ -58,3 +100,11 @@ def is_current_env():
def is_docker_env():
return get_env().__class__.__name__ == "DockerEnv"
+
+
+def is_virtual_env():
+ return get_env().__class__.__name__ == "VirtualEnv"
+
+
+def is_compose_env():
+ return get_env().__class__.__name__ == "ComposeEnv"
diff --git a/piper/imports.py b/piper/imports.py
new file mode 100644
index 0000000..61208c0
--- /dev/null
+++ b/piper/imports.py
@@ -0,0 +1,154 @@
+import builtins
+import inspect
+from types import ModuleType
+
+from piper.utils.logger_utils import logger
+from piper.configurations import get_configuration
+
+configuration = get_configuration()
+
+
+def _empty_import():
+ logger.error("Import Not Installed Yet!")
+ raise ImportError
+
+
+real_import = _empty_import
+
+
+class PiperDummyModule(ModuleType):
+
+ def __init__(self, name):
+ super().__init__(name)
+ logger.info(f"Piper emulates {name} module")
+
+ def __getattr__(self, name):
+ return PiperDummyModule(name)
+
+ __all__ = []
+
+
+def _piper_was_touched_in_frame(frame_before=1):
+ call_function_frame = inspect.currentframe().f_back
+ frame = call_function_frame
+ for i in range(frame_before):
+ frame = frame.f_back
+
+ result = False
+ f_locals = frame.f_locals
+ f_globals = frame.f_globals
+
+ # for python 3.8
+ all_variables = dict()
+ for k,v in f_locals.items():
+ all_variables[k] = v
+
+ for k,v in f_globals.items():
+ all_variables[k] = v
+
+ # all_variables = dict(f_locals.items() | f_globals.items())
+
+ # for python 3.9
+ # all_variables = f_locals | f_globals
+
+ if all_variables.values():
+ all_variables = [v for v in all_variables.values() if v is not None]
+ if len(all_variables) > 0:
+ variables_have_piper_package = any("piper" in v.__package__ \
+ for v in all_variables if hasattr(v, "__package__") and type(v.__package__) == str)
+ variables_have_piper_module = any("piper" in v.__module__ \
+ for v in all_variables if hasattr(v, "__module__") and type(v.__module__) == str)
+ result = variables_have_piper_module | variables_have_piper_package
+
+ return result
+
+
+def _from_piper_file_but_not_piper(name: str, globals={}):
+ is_import_from_piper_source_code = "__file__" in globals and "piper/" in globals["__file__"]
+ not_piper_import = not ("piper" in name)
+ result = is_import_from_piper_source_code and not_piper_import
+
+ return result
+
+
+def try_import(name, globals={}, locals={}, fromlist=[], level=0):
+ """
+    This import replaces the real Python import with a fake import that only logs a warning and returns a PiperDummyModule.
+    It applies to every file under piper/ (matched by filename) except imports of piper itself (like piper.base).
+    It also applies to every file that first imports something from piper.
+ """
+ if not (configuration.ignore_import_errors or configuration.safe_import_activated):
+ logger.info("Ignore import errors is off in Configuration and deactivated")
+ return real_import(name, globals, locals, fromlist, level)
+
+ piper_was_touched_in_previous_frame = _piper_was_touched_in_frame(frame_before=1)
+ need_to_catch = piper_was_touched_in_previous_frame or _from_piper_file_but_not_piper(name, globals)
+
+ if need_to_catch:
+ logger.info(f"Piper runs safe import for library {name} in piper file {globals['__file__']} ")
+ try:
+ return real_import(name, globals, locals, fromlist, level)
+ except ImportError as e:
+ logger.warning(f"Piper ignores ImportError and module {name} "
+ f": replaced by dummy module. ImportError: {e.with_traceback(None)}")
+ module = PiperDummyModule(name)
+ return module
+ else:
+ return real_import(name, globals, locals, fromlist, level)
+
+
+"""
+Here Piper saves default Python *import* only.
+"""
+if builtins.__import__ != try_import:
+ real_import = builtins.__import__
+
+
+def _set_import_functions(ignore: bool = True):
+ if ignore:
+ builtins.__import__ = try_import
+ else:
+ builtins.__import__ = real_import
+
+
+def activate_safe_import():
+ """
+ Activate piper safe import with try_import function.
+ Piper needs safe import to ignore imports in Executors examples.
+ For instance if you want to use Pandas in your CustomExecutor normally you have to *import pandas*
+ But we don't want to install everything for every executor in default Python (where Piper is installed)
+ For that you have to ignore every Executors dependencies.
+
+    Otherwise, you can wrap every Executor's import with try_import yourself,
+ or you can use directly only *requirements* field in your CustomExecutor.
+
+ """
+ logger.info(f"Piper activates safe import")
+ configuration.safe_import_activated = True
+ _set_import_functions(ignore=True)
+
+
+def deactivate_safe_import():
+ logger.info(f"Piper deactivates safe import")
+ configuration.safe_import_activated = False
+ _set_import_functions(ignore=configuration.ignore_import_errors)
+
+
+class safe_import:
+ """
+ Context manager to activate safe import on some part of imports.
+ For instance:
+
+ with safe_import():
+ import foo
+ import bar
+
+    foo would be ignored and replaced by PiperDummyModule
+    bar wouldn't be replaced (you can catch ImportError)
+ """
+
+ def __enter__(self):
+ activate_safe_import()
+
+ def __exit__(self, type, value, traceback):
+ deactivate_safe_import()
diff --git a/piper/services/__init__.py b/piper/services/__init__.py
index 62c7810..2c6dcc3 100644
--- a/piper/services/__init__.py
+++ b/piper/services/__init__.py
@@ -1,47 +1,48 @@
-from piper.base.executors import FastAPIExecutor, FastAPITesseractExecutor
-from fastapi.responses import JSONResponse
+import sys
+
+from piper.imports import safe_import
+
+with safe_import():
+ import spacy
+ from fastapi.responses import JSONResponse
+# from loguru import logger
+from piper.utils.logger_utils import logger
from pydantic import BaseModel
-from loguru import logger
-import json
-import spacy
-import sys
+
+from piper.base.executors import FastAPIExecutor, FastAPITesseractExecutor
from piper.configurations import get_configuration
-from piper.utils import tesrct_utils as tu
+from piper.utils.TesseractOCR import tesseract_ocr as tu
+from piper.utils.FaceDetection import face_detector as fru
-logger.add("file.log", level="INFO", backtrace=True, diagnose=True, rotation='5 MB')
+# logger.add("file.log", level="INFO", backtrace=True, diagnose=True, rotation='5 MB')
+from piper.base.executors.fastapi import FastAPIFaceDetectorExecutor
class StringValue(BaseModel):
value: str
+
class BytesObject(BaseModel):
value: bytes
+
class ListOfStringsObject(BaseModel):
value: list
-class TestMessageAdder(FastAPIExecutor):
-
- def __init__(self, appender="TEST", **kwargs):
- self.appender = appender
- super().__init__(**kwargs)
-
- async def run(self, message: StringValue) -> StringValue:
- return StringValue(value=(message.value + self.appender))
-
class TesseractRecognizer(FastAPITesseractExecutor):
'''
Tesseract OCR implementation service
'''
+
def __init__(self, **kwargs):
super().__init__(**kwargs)
cfg = get_configuration()
self.ts_config = cfg.ts_config
- def set_config_(self, config_):
+ def set_config_(self, config_):
if 'ts_lang' not in config_.keys():
logger.error(f'tesseract config keys must contains ts_lang, keys {config_.keys()}')
logger.error(f'tesseract config did not set')
@@ -59,9 +60,9 @@ async def sconfig(self, conf) -> ListOfStringsObject:
# conf = '12'
logger.info(f'request to set config to {conf}')
self.set_config_(conf)
- return JSONResponse(content={'text':'OK'})
-
- async def recognize(self, file_content : BytesObject, suf: str) -> ListOfStringsObject:
+ return JSONResponse(content={'text': 'OK'})
+
+ async def recognize(self, file_content: BytesObject, suf: str) -> ListOfStringsObject:
logger.info(f'file_content {type(file_content)}, file suffix is {suf}')
logger.info(f'current tesseract config is {self.ts_config}')
@@ -69,13 +70,14 @@ async def recognize(self, file_content : BytesObject, suf: str) -> ListOfStrings
logger.info(f'img_bytes_handler return {type(text_dict)} object')
return JSONResponse(content=text_dict)
- async def ner(self, txt: str):
+ async def ner(self, txt: str):
sn = SpacyNER()
if sn.available_models and len(sn.available_models) > 0:
dummy_model = sn.available_models[0]
sn.set_model(dummy_model)
return JSONResponse(content=sn.extract_named_ents(txt))
+
# class ModelNameNotInList(BaseException):
# def __init__(self, msg):
# # pass
@@ -86,6 +88,7 @@ class SpacyNER():
'''
Spacy NER service
'''
+
def __init__(self):
cfg = get_configuration()
self.available_models = set()
@@ -102,26 +105,24 @@ def __init__(self):
logger.error(f'catch exception {e}')
sys.exit()
-
def set_model(self, cur_model):
if cur_model not in self.available_models:
logger.error(f'there is not {cur_model} in available_models set: {self.available_models}')
self.nlp = None
raise ValueError(f'there is not {cur_model} in available_models set: {self.available_models}')
- try:
+ try:
nlp = spacy.load(cur_model)
# nlp = spacy.load('en_default')
logger.info('spacy nlp object created with model {cur_model}')
except Exception as e:
logger.error(f'catch exception {e}')
- if isinstance(e, OSError):
+ if isinstance(e, OSError):
logger.error(f'you must download spacy model {cur_model}')
nlp = None
logger.info('spacy nlp object DID NOT create')
-
- self.nlp = nlp
+ self.nlp = nlp
def extract_named_ents(self, txt: str):
logger.debug(f'got data type {type(txt)} and data <<{txt}>> for NER')
@@ -129,7 +130,23 @@ def extract_named_ents(self, txt: str):
res = []
doc = self.nlp(txt)
for ent in doc.ents:
- res.append((ent.text, ent.label_))
+ res.append((ent.text, ent.label_))
return JSONResponse(content=res)
else:
logger.error(f'nlp object didn`t create. you should use set_model(model_name)')
+
+
+class FaceDetector(FastAPIFaceDetectorExecutor):
+ '''
+ FaceDetector implementation service
+ '''
+ def __init__(self, **kwargs):
+ self.face_detector = fru.FaceDetector()
+ super().__init__(**kwargs)
+
+
+ async def recognize(self, file_content : BytesObject, suf: str) -> ListOfStringsObject:
+ logger.info(f'face_detector recive {type(file_content)} object')
+ text_dict = self.face_detector.bytes_handler(file_content)
+ logger.info(f'face_detector img_bytes_handler return {(text_dict)} object')
+ return JSONResponse(content=text_dict)
\ No newline at end of file
diff --git a/piper/utils/FaceDetection/face_detector.py b/piper/utils/FaceDetection/face_detector.py
new file mode 100755
index 0000000..a0350c0
--- /dev/null
+++ b/piper/utils/FaceDetection/face_detector.py
@@ -0,0 +1,39 @@
+import sys
+from loguru import logger
+
+try:
+ from mtcnn import MTCNN
+ import numpy as np
+ import cv2
+
+except ImportError as e:
+ logger.error(e)
+
+
+class FaceDetector():
+
+ def __init__(self):
+ self.detector = None
+ logger.info('FaceRecognizer model is MTCNN')
+
+ def bytes_handler(self, img_bytes):
+        '''Detect faces on received image and return their coordinates'''
+
+ logger.info(f'bytes_handler with arg {type(img_bytes)} and len {sys.getsizeof(img_bytes)}')
+ np_array = np.asarray(bytearray(img_bytes), dtype="uint8")
+ logger.info(f'converted image is type of {type(np_array)} and size {np_array.shape}')
+ img = cv2.imdecode(np_array, cv2.IMREAD_COLOR)
+
+ if img is not None:
+ logger.info(f'converted to cv2 image with shape {img.shape}')
+ if img is not None:
+ h, w, _ = img.shape
+ if h > 0 and w > 0:
+ detector = MTCNN()
+ # logger.info(f'detector is {self.detector}')
+ logger.info('start detect faces')
+ detections = detector.detect_faces(img)
+ logger.info(f'detections is {detections}')
+ return detections
+ else:
+ logger.error('can not convert bytes to cv2 image')
\ No newline at end of file
diff --git a/piper/utils/TesseractOCR/tesseract_ocr.py b/piper/utils/TesseractOCR/tesseract_ocr.py
new file mode 100755
index 0000000..3f916a5
--- /dev/null
+++ b/piper/utils/TesseractOCR/tesseract_ocr.py
@@ -0,0 +1,85 @@
+from loguru import logger
+
+try:
+ import pdf2image
+ import pytesseract
+
+except ImportError as ie:
+ print(ie)
+
+import numpy as np
+import cv2
+
+import requests
+from piper.configurations import get_configuration
+
+cfg = get_configuration()
+
+def send_file_to_ocr_service(url, file_path):
+    '''Load file by path and send it to the OCR service by url'''
+ multipart_form_data = {
+ 'file': open(file_path, 'rb')
+ }
+
+ try:
+ result = requests.post(url, files=multipart_form_data, verify=False)
+ return result
+
+ except requests.exceptions.ConnectionError as ce:
+ logger.error(f'exeption while connect to {url}')
+ logger.error(ce)
+
+def img_to_text(img, ts_conf):
+ '''Get text from image with Tesseract'''
+ logger.info(f'pytesseract process file with len {len(img)}')
+ txt_dict = pytesseract.image_to_data(
+ img,
+ lang=ts_conf.get('lang'),
+ config=ts_conf.get('ts_config_row'),
+ output_type=pytesseract.Output.DICT
+ )
+ return txt_dict
+
+
+def bytes_handler(bbytes, suf, ts_conf):
+ '''Process bytes as image or as PDF document'''
+ if suf in cfg.image_suffixes:
+ logger.info('bytes are image')
+ return img_bytes_handler(bbytes, ts_conf)
+ elif suf in cfg.pdf_suffixes:
+ logger.info('bytes are pdf document')
+ return pdf_bytes_handler(bbytes, ts_conf)
+
+
+def pdf_bytes_handler(pdf_bytes, ts_conf):
+ '''Process PDF document'''
+ bytes_to_images = pdf2image.convert_from_bytes(
+ pdf_bytes,
+ thread_count=cfg.thread_count,
+ dpi=cfg.dpi
+ )
+ logger.info(f'pdf to image return {bytes_to_images} pages')
+ pages = [np.asarray(x) for x in bytes_to_images]
+    # TODO: add processing of all pages
+ if len(pages) > 0:
+ logger.error(f'try to recognize pages {len(pages)}')
+ txt_dict = img_to_text(pages[0], ts_conf)
+ logger.info(f'img_to_text returned {txt_dict}')
+ return txt_dict
+ else:
+ logger.error('no pdf pages to recognize')
+
+
+def img_bytes_handler(img_bytes, ts_conf):
+ '''Process image'''
+ img = cv2.imdecode(np.asarray(bytearray(img_bytes), dtype=np.uint8), cv2.IMREAD_COLOR)
+ if img is not None:
+ logger.info(f'processing img with shape {img.shape}')
+ txt_dict = img_to_text(img, ts_conf)
+
+ logger.info(f'get text from image {txt_dict}')
+ logger.info(f'img_to_text returned {txt_dict}')
+ return txt_dict
+
+ else:
+ logger.error('recive empty image or convertion failed')
diff --git a/piper/utils/docker_utils.py b/piper/utils/docker_utils.py
index 57975c6..98ee0bd 100644
--- a/piper/utils/docker_utils.py
+++ b/piper/utils/docker_utils.py
@@ -1,183 +1,245 @@
-import docker
-import time
-import sys
-from loguru import logger
-
-from piper.configurations import get_configuration
-
-cfg = get_configuration()
-
-def get_image(docker_client, image_name):
- try:
- cur_image = docker_client.images.get(image_name)
- return cur_image
- except docker.errors.ImageNotFound:
- logger.info(f'image with tag {image_name} not found')
- return False
-
-
-def delete_image(docker_client, image_tag):
- try:
- docker_client.images.remove(image_tag, force=True)
- return True
- except Exception as e:
- logger.error('error while remove image', e)
- return False
-
-
-def get_container(docker_client, container_name):
- try:
- cur_container = docker_client.containers.get(container_name)
- return cur_container
- except docker.errors.NotFound:
- logger.info(f'container with name {container_name} not found')
- return False
- except Exception as e:
- logger.error(f'non defined exeption {e}')
- return False
-
-def get_container_with_status(docker_client, container_name):
- try:
- cur_container = docker_client.containers.get(container_name)
- if cur_container:
- status = cur_container.status
- cont_id = cur_container.id
- return cur_container, status, cont_id
- except docker.errors.NotFound:
- logger.info(f'container with name {container_name} not found')
- return False
- except Exception as e:
- logger.error(f'non defined exeption {e}')
- return False
-
-def stop_container(docker_client, container_name):
- try:
- cur_container = docker_client.containers.get(container_name)
- cur_container.stop()
- return True
- except docker.errors.NotFound:
- logger.error(f'container for stop with name {container_name} not found')
- return False
- except docker.errors.APIError:
- logger.error(f'error while stop container {container_name}')
- return False
-
-
-def remove_container(docker_client, container_name):
- try:
- cur_container = docker_client.containers.get(container_name)
- cur_container.remove(v=True, force=True)
- return True
- except docker.errors.NotFound:
- logger.error(f'container for stop with name {container_name} not found')
- return False
- except docker.errors.APIError as de:
- logger.error(f'error while remove container {container_name}')
- logger.error(de)
- return False
-
-
-def stop_and_rm_container(docker_client, container_name):
- # get container
- cur_container = get_container(docker_client, container_name)
-
- # get container status
- if not cur_container:
- logger.info(f'container {container_name} not found')
- return 'deleted'
- else:
- status = cur_container.status
- cont_id = cur_container.id
- logger.info('status', status, type(status))
-
- if status == 'running':
- logger.info(f'container {container_name} started already. Stop it!')
- # stop
- stop_result = stop_container(docker_client, cont_id)
- logger.info('stoped', stop_result)
- status = 'exited'
- else:
- logger.info("status not running")
-
- if status == 'exited':
- logger.info(f'container {container_name} exists already. Remove it!')
- # rm
- remove_result = remove_container(docker_client, cont_id)
- logger.info('removed, remove_result is ', remove_result)
- status = 'deleted'
- else:
- logger.info("status not exited")
- return status
-
-
-def image_find_and_rm(docker_client, image_tag):
- cur_img = get_image(docker_client, image_tag)
- logger.info(cur_img)
- if cur_img:
- logger.info(f'image {image_tag} exists')
- logger.info(f'cur_img is {cur_img}, ID is {cur_img.id}')
- del_result = delete_image(docker_client, image_tag)
- logger.info(f'del_result of image {del_result}')
- return del_result
- else:
- # не нужно ничего удалять, контейнера нет
- return True
-
-
-def create_image_and_container_by_dockerfile(docker_client, path, image_tag, container_name, port):
- # should be deleted
- status = stop_and_rm_container(docker_client, container_name)
-
- cur_cont = get_container(docker_client, container_name)
- if cur_cont:
- logger.error(f'container not deleted, {cur_cont}')
- sys.exit()
-
- # remove image
- if status == 'deleted':
- # remove image
- del_result = image_find_and_rm(docker_client, image_tag)
- if del_result:
- # create new image
- image, logs = docker_client.images.build(
- path=path,
- tag=image_tag,
- quiet=False,
- rm=True, # creates image only without container
- forcerm=True,
- timeout=20
- )
- for log in logs:
- logger.debug(log)
- logger.info(f'image {image} created')
-
- # run container
- try:
- container = docker_client.containers.run(image_tag, name=container_name, detach=True, ports={8080:port})
- for log in container.logs():
- logger.debug(log)
- logger.info(f'container {container} created')
-
- i = 0
- while True:
- i += 1
- # logger.info(get_container_with_status(docker_client, container_name))
- container.reload()
- logger.info(f'container.status {container.status}')
- if container.status == 'running':
- break
-
- if i == cfg.docker_n_iters:
- logger.error(f'container {container_name} can`t start, status is {container.status}')
- sys.exit()
- time.sleep(cfg.docker_wait_on_iter)
-
-
- except docker.errors.APIError as api_e:
- logger.error(f'eroror while run container {container_name}')
- logger.error(str(api_e))
- sys.exit()
- else:
- logger.error(f'error while del image {image_tag}')
- sys.exit()
-
+import docker
+import time
+import sys
+from loguru import logger
+
+from typing import Dict
+from docker.errors import APIError, BuildError
+from piper.configurations import get_configuration
+
+cfg = get_configuration()
+
+def get_image(docker_client, image_name):
+ '''Get Python object of Docker image by name'''
+ try:
+ cur_image = docker_client.images.get(image_name)
+ return cur_image
+ except docker.errors.ImageNotFound:
+ logger.info(f'image with tag {image_name} not found')
+ return False
+
+
+def delete_image(docker_client, image_tag):
+ '''Delete Docker image by name'''
+ try:
+ docker_client.images.remove(image_tag, force=True)
+ return True
+ except Exception as e:
+ logger.error('error while remove image', e)
+ return False
+
+
+def get_container(docker_client, container_name):
+ '''Get Python object of Docker container by name'''
+ try:
+ cur_container = docker_client.containers.get(container_name)
+ return cur_container
+ except docker.errors.NotFound:
+ logger.info(f'container with name {container_name} not found')
+ return False
+ except Exception as e:
+ logger.error(f'non defined exeption {e}')
+ return False
+
+def get_container_with_status(docker_client, container_name):
+ '''Get Python object of Docker container by name with its status and ID'''
+ try:
+ cur_container = docker_client.containers.get(container_name)
+ if cur_container:
+ status = cur_container.status
+ cont_id = cur_container.id
+ return cur_container, status, cont_id
+ except docker.errors.NotFound:
+ logger.info(f'container with name {container_name} not found')
+ return False
+ except Exception as e:
+ logger.error(f'non defined exeption {e}')
+ return False
+
+def stop_container(docker_client, container_name):
+ '''Stop Docker container by name'''
+ try:
+ cur_container = docker_client.containers.get(container_name)
+ cur_container.stop()
+ return True
+ except docker.errors.NotFound:
+ logger.error(f'container for stop with name {container_name} not found')
+ return False
+ except docker.errors.APIError:
+ logger.error(f'error while stop container {container_name}')
+ return False
+ except Exception as e:
+ logger.error(f'non defined exeption {e}')
+ return False
+
+
+def remove_container(docker_client, container_name):
+ '''Remove stopped Docker container by name'''
+ try:
+ cur_container = docker_client.containers.get(container_name)
+ cur_container.remove(v=True, force=True)
+ return True
+ except docker.errors.NotFound:
+ logger.error(f'container for stop with name {container_name} not found')
+ return False
+ except docker.errors.APIError as de:
+ logger.error(f'error while remove container {container_name}')
+ logger.error(de)
+ return False
+ except Exception as e:
+ logger.error(f'non defined exeption {e}')
+ return False
+
+
+def stop_and_rm_container(docker_client, container_name):
+ '''Stop and remove Docker container by name'''
+ # get container
+ cur_container = get_container(docker_client, container_name)
+
+ # get container status
+ if not cur_container:
+ logger.info(f'container {container_name} not found')
+ return 'deleted'
+ else:
+ status = cur_container.status
+ cont_id = cur_container.id
+ logger.info('status', status, type(status))
+
+ if status == 'running':
+ logger.info(f'container {container_name} started already. Stop it!')
+ # stop
+ stop_result = stop_container(docker_client, cont_id)
+ logger.info('stoped', stop_result)
+ status = 'exited'
+ else:
+ logger.info("status not running")
+
+ if status == 'exited':
+ logger.info(f'container {container_name} exists already. Remove it!')
+ # rm
+ remove_result = remove_container(docker_client, cont_id)
+ logger.info('removed, remove_result is ', remove_result)
+ status = 'deleted'
+ else:
+ logger.info("status not exited")
+ return status
+
+
+def image_find_and_rm(docker_client, image_tag):
+ '''Remove Docker image by name'''
+ cur_img = get_image(docker_client, image_tag)
+ logger.info(cur_img)
+ if cur_img:
+ logger.info(f'image {image_tag} exists')
+ logger.info(f'cur_img is {cur_img}, ID is {cur_img.id}')
+ del_result = delete_image(docker_client, image_tag)
+ logger.info(f'del_result of image {del_result}')
+ return del_result
+ else:
+        # nothing to delete, the container does not exist
+ return True
+
+
+def create_image_and_container_by_dockerfile(docker_client, path, image_tag, container_name, port):
+    '''Create a Docker container from a Dockerfile. If the container already exists it will be deleted along with its image'''
+
+ # delete existing container
+ status = stop_and_rm_container(docker_client, container_name)
+
+ cur_cont = get_container(docker_client, container_name)
+ if cur_cont:
+ logger.error(f'container not deleted, {cur_cont}')
+ sys.exit()
+
+ # remove image
+ if status == 'deleted':
+ # remove image
+ del_result = image_find_and_rm(docker_client, image_tag)
+ if del_result:
+ # create new image
+ image, logs = docker_client.images.build(
+ path=path,
+ tag=image_tag,
+ quiet=False,
+ rm=True, # creates image only without container
+ forcerm=True,
+ timeout=20
+ )
+ for log in logs:
+ logger.debug(log)
+ logger.info(f'image {image} created')
+
+ # run container
+ try:
+ container = docker_client.containers.run(image_tag, name=container_name, detach=True, ports={8080:port})
+ for log in container.logs():
+ logger.debug(log)
+ logger.info(f'container {container} created')
+
+ i = 0
+ while True:
+ i += 1
+ # logger.info(get_container_with_status(docker_client, container_name))
+ container.reload()
+ logger.info(f'container.status {container.status}')
+ if container.status == 'running':
+ break
+
+ if i == cfg.docker_n_iters:
+ logger.error(f'container {container_name} can`t start, status is {container.status}')
+ sys.exit()
+ time.sleep(cfg.docker_wait_on_iter)
+
+ return container
+
+
+ except docker.errors.APIError as api_e:
+ logger.error(f'eroror while run container {container_name}')
+ logger.error(str(api_e))
+ sys.exit()
+ else:
+ logger.error(f'error while del image {image_tag}')
+ sys.exit()
+
+
+def build_image(path: str, tag):
+ '''Build Docker image from Dockerfile'''
+ client = docker.DockerClient(base_url='unix://var/run/docker.sock')
+ logger.info('build start!')
+
+ try:
+ print(path)
+ print(tag)
+ image, logs = client.images.build(path=path,
+ tag=tag,
+ quiet=False,
+ timeout=120)
+ # for log in logs:
+ # logger.info(f'executor build_image: {log}')
+ # logger.info(f'image is {image}')
+
+ except BuildError as e:
+ logger.error('BuildError while build_image:')
+ for line in e.build_log:
+ if 'stream' in line:
+ logger.error(line['stream'].strip())
+ sys.exit()
+
+ except APIError as e:
+ logger.error('APIError while build_image: server returned error')
+ sys.exit()
+
+ except Exception as e:
+ logger.error(f'non defined exeption {e}')
+ sys.exit()
+
+def run_container(image: str, ports: Dict[int, int]):
+ '''Run Docker container'''
+ client = docker.DockerClient(base_url='unix://var/run/docker.sock')
+ container = client.containers.run(image, detach=True, ports=ports)
+ for log in container.logs():
+ logger.info(f'executor run_container: {log}')
+ logger.info(f'container is {container}')
+ time.sleep(10)
+
+ return container
\ No newline at end of file
diff --git a/piper/utils/logger_utils.py b/piper/utils/logger_utils.py
new file mode 100644
index 0000000..b05ea39
--- /dev/null
+++ b/piper/utils/logger_utils.py
@@ -0,0 +1,16 @@
+import logging
+import sys
+
+logger = logging.getLogger()
+logger.setLevel(logging.DEBUG)
+
+handler = logging.StreamHandler(sys.stdout)
+handler.setLevel(logging.DEBUG)
+formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(funcName)s - %(message)s')
+handler.setFormatter(formatter)
+logger.addHandler(handler)
+
+fh = logging.FileHandler('./piper_base_log.log')
+fh.setLevel(logging.DEBUG)
+fh.setFormatter(formatter)
+logger.addHandler(fh)
diff --git a/piper/utils/tesrct_utils.py b/piper/utils/tesrct_utils.py
index 1d17577..b505d3b 100644
--- a/piper/utils/tesrct_utils.py
+++ b/piper/utils/tesrct_utils.py
@@ -1,13 +1,19 @@
-import cv2
-import pytesseract
-import numpy as np
-from loguru import logger
-import pdf2image
+from piper.imports import safe_import
+with safe_import():
+ import cv2
+ import numpy as np
+ import pdf2image
+ import pytesseract
+
import requests
+# from loguru import logger
+from piper.utils.logger_utils import logger
+
from piper.configurations import get_configuration
cfg = get_configuration()
+
def send_file_to_service(url, file_path, json_key):
multipart_form_data = {
'file': open(file_path, 'rb')
@@ -27,6 +33,7 @@ def send_file_to_service(url, file_path, json_key):
logger.error(f'exeption while connect to {url}')
logger.error(ce)
+
def img_to_text(img, ts_conf):
logger.info(f'pytesseract process file with len {len(img)}')
txt_dict = pytesseract.image_to_data(
diff --git a/requirements.txt b/requirements.txt
index fa6642d..57a37fa 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,5 +1,6 @@
-docker
aiohttp
+click==8.1.3
+docker
Jinja2
pydantic
loguru
\ No newline at end of file
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..2bb3002
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,36 @@
+import pathlib
+from io import open
+from os import path
+
+from setuptools import find_packages, setup
+
+# directory with current setup.py file
+HERE = pathlib.Path(__file__).parent
+
+# Readme file as text
+README = (HERE / "Readme.rst").read_text()
+
+# Automatically collects all_modules to requirements.txt for install_requires and set dependency links
+with open(path.join(HERE, 'requirements.txt'), encoding='utf-8') as f:
+ all_reqs = f.read().split('\n')
+install_requires = [x.strip() for x in all_reqs if ('git+' not in x) and (
+ not x.startswith('#')) and (not x.startswith('-'))]
+dependency_links = [x.strip().replace('git+', '') for x in all_reqs \
+ if 'git+' not in x]
+
+setup(
+ name='piper',
+ version='0.0.2',
+ packages=find_packages(), # list of all packages
+ install_requires=install_requires,
+ include_package_data=True,
+    python_requires='>=3.6',  # f-strings are used throughout; Python 2 is not supported
+ entry_points='''
+ [console_scripts]
+ piper=piper.__main__:main
+ ''',
+ zip_safe=False,
+ long_description=README,
+    long_description_content_type="text/x-rst",
+ dependency_links=dependency_links,
+)
diff --git a/tests/PiperOperator.py b/tests/PiperOperator.py
deleted file mode 100644
index 56761d7..0000000
--- a/tests/PiperOperator.py
+++ /dev/null
@@ -1,176 +0,0 @@
-import requests
-import json
-import os
-import sys
-from pprint import pprint
-from loguru import logger
-
-# root_dir = os.path.realpath(os.path.pardir)
-# logger.info(f'root dir is {root_dir}')
-# sys.path.insert(1, root_dir)
-# from piper.utils import tesrct_utils as tu
-
-
-HEADERS = {"Content-Type": "application/json"}
-NER_RESPONSE_KEY = 'body'
-
-class PiperOperatorException(BaseException):
- def __init__(self, msg):
- pass
- # logger.exception(msg)
-
-
-
-class FileLoadException(PiperOperatorException):
- def __init__(self, fn):
- self.fn = fn
- super().__init__(f'file {fn} can`t be loaded')
-
-
-class JSONGetKeyException(PiperOperatorException):
- def __init__(self, key):
- self.key = key
- super().__init__(f'can`t get JSON key {key}')
-
-
-class NoAvailableModelsException(PiperOperatorException):
- def __init__(self):
- super().__init__(f'there are no spacy models')
-
-
-def get_data_by_key_from_response(cur_response, k):
- j = cur_response.json()
- if not j and k not in j.keys():
- raise JSONGetKeyException(k)
- v = j.get(k)
- return v
-
-def get_data_by_key_from_url(url, key, post=True, data=None, file_name=""):
- try:
- if post:
- if file_name:
- logger.info(f'filename is {file_name}')
- multipart_form_data = {
- 'file': open(file_name, 'rb')
- }
- cur_response = requests.post(url, files=multipart_form_data, verify=False)
- else:
- cur_response = requests.post(url, headers=HEADERS, data=data)
-
- logger.debug(f'url is {url}, response is {cur_response}, content is {cur_response.content}')
- cur_response.raise_for_status()
- if key:
- logger.debug(f'try to get value for key {key}')
- # pprint(cur_response.json())
- val = get_data_by_key_from_response(cur_response, key)
- logger.debug(f'value for key is {val}')
- return val
- else:
- return cur_response
-
- else:
- cur_response = requests.get(url, headers=HEADERS, data=data)
- cur_response.raise_for_status()
- # logger.debug(f'response is {cur_response.text}')
- val = get_data_by_key_from_response(cur_response, key)
- return val
-
- except requests.exceptions.ConnectionError as ce:
- logger.exception(f'can`t connect to url: {ce}')
-
- except JSONGetKeyException as cjke:
- logger.exception(f'can`t get key from response: {cjke}')
-
- except Exception as e:
- logger.exception(f'error while processing url {url}: {e}')
-
-
-class PiperNLPWorker():
- '''
- simple class shows how to use piper NLPProcessor
- '''
-
- def __init__(self, base_url):
- self.base_url = base_url
-
- ### RECOGNIZE
- self.url_tsrct_cfg = f'{self.base_url}/set_config'
- self.url_rcg = f'{self.base_url}/recognize'
-
- ### NER
- # get all available SPACY models url
- self.url_spacy_all_models = f'{self.base_url}/get_ner_models'
- # set current SPACY model url
- self.url_spacy_set_model = f'{self.base_url}/set_ner_model'
- # get named entitys from text url
- self.url_spacy_get_NE = f'{self.base_url}/extract_named_ents'
-
-
- def get_available_ner_models(self):
- return get_data_by_key_from_url(self.url_spacy_all_models, 'available_models', post=False)
-
- def set_current_spacy_model(self, model):
- return get_data_by_key_from_url(self.url_spacy_set_model, '', post=True, data=json.dumps({'model_name':model}))
-
- def get_named_ent_from_text(self, txt):
- resp = get_data_by_key_from_url(self.url_spacy_get_NE, 'result', post=False, data=json.dumps({'txt':txt}))
- logger.debug(f'url is {resp}, response is {resp}')
- if NER_RESPONSE_KEY in resp.keys():
- named_ents = resp.get(NER_RESPONSE_KEY)
- if named_ents:
- return json.loads(named_ents)
- else:
- logger.info(f'NER result is empty: {named_ents}')
- return []
- else:
- raise JSONGetKeyException(NER_RESPONSE_KEY)
-
- def get_text_from_file(self, fn):
- try:
- txt = get_data_by_key_from_url(self.url_rcg, 'text', post=True, file_name=fn)
- return txt
-
- except Exception as e:
- logger.error(f'error while extract text from file {fn}')
- logger.exception(e)
-
- def set_tesseract_config(self, conf):
- return get_data_by_key_from_url(self.url_tsrct_cfg, '', post=True, data=json.dumps(conf))
-
-if __name__ == '__main__':
- piper_worker = PiperNLPWorker('http://localhost:8788')
-
-
- amodels = piper_worker.get_available_ner_models()
- print('all models', amodels)
-
- # model = amodels[0]
- model = 'en_core_web_sm'
- ok = piper_worker.set_current_spacy_model(model)
- # print(ok, ok.text)
- if ok:
- print('model set!')
- else:
- print('model does not set')
- sys.exit()
-
- txt = 'The Alraigo Incident occurred on 6th June 1983, when a lost British Royal Navy Sea Harrier fighter aircraft landed on the deck of a Spanish container ship.[1][2] Its pilot, Sub-lieutenant Ian Watson, was a junior Royal Navy Pilot undertaking his first NATO exercise from HMS Illustrious, which was operating off the coast of Portugal. Watson was launched in a pair of aircraft tasked with locating a French aircraft carrier under combat conditions including radio-silence and radar switched off.'
- try:
- resp = piper_worker.get_named_ent_from_text(txt)
- except JSONGetKeyException as e:
- logger.exception(e)
- # pprint(resp)
-
-
- txt = piper_worker.get_text_from_file('/home/pavel/repo/piper_new/piper/tests/ocr_data.pdf')
- logger.info(f'txt {txt}')
-
-
- ts_conf = dict()
- ts_conf['ts_lang'] = 'eng'
- ts_conf['ts_config_row'] = rf'--oem 1 --psm 6'
-
- resp = piper_worker.set_tesseract_config(ts_conf)
- logger.info(resp)
-
-
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/base_executor_test.py b/tests/base_executor_test.py
new file mode 100644
index 0000000..4a5b7af
--- /dev/null
+++ b/tests/base_executor_test.py
@@ -0,0 +1,52 @@
+import asyncio
+import pytest
+
+from piper.envs import CurrentEnv, Env
+from piper.base.executors import BaseExecutor
+
+
+class ExecutorImpl(BaseExecutor):
+ def run(self, x: int) -> int:
+ return x + 1
+ async def exec(self, x: int) -> int:
+ return x - 1
+
+
+inst = ExecutorImpl()
+loop = asyncio.get_event_loop()
+
+
+class TestBaseExecutorClass:
+
+ def test_executor_not_async(self):
+ assert not inst.is_async
+
+ def test_executor_run(self):
+ with CurrentEnv():
+ assert inst(10) == 11
+
+ def test_executor_exec_error(self):
+ inst.is_async = True
+ # it is coroutine must be an error here
+ with pytest.raises(Exception):
+ result = inst.exec(10)
+ result + 1
+ inst.is_async = False
+
+ def test_executor_exec(self):
+ inst.is_async = True
+ with CurrentEnv():
+ assert loop.run_until_complete(inst(10)) == 9
+ inst.is_async = False
+
+ def test_custom_env_run(self):
+ class CustomEnv(Env):
+ name = "custom_env"
+
+ with CustomEnv():
+ with pytest.raises(Exception):
+ inst(10)
+
+ # add implementation
+ inst.custom_env_run = lambda x: x * 10
+ assert inst(10) == 100
diff --git a/tests/base_test.py b/tests/base_test.py
new file mode 100644
index 0000000..8ee3d26
--- /dev/null
+++ b/tests/base_test.py
@@ -0,0 +1,60 @@
+# pytest -vs tests/base_test.py::TestPiperBase
+import asyncio
+
+from piper.base.executors import FastAPIExecutor
+from piper.services import StringValue
+from piper.configurations import get_configuration
+from piper.envs import CurrentEnv, DockerEnv, ComposeEnv
+
+cfg = get_configuration()
+loop = asyncio.get_event_loop()
+
+
+class TestMessageAdder(FastAPIExecutor):
+
+ def __init__(self, appender="TEST", **kwargs):
+ self.appender = appender
+ super().__init__(**kwargs)
+
+ async def run(self, message: StringValue) -> StringValue:
+ return StringValue(value=(message.value + self.appender))
+
+
+class TestPiperBase:
+
+ def test_simple_async_executors(self):
+ init_value = "TEST_container_"
+ x = StringValue(value=init_value)
+ need_result = f'{init_value}TEST'
+
+ with CurrentEnv() as env:
+ adder = TestMessageAdder(port=cfg.docker_app_port)
+ result = loop.run_until_complete(adder.aio_call(x))
+ assert result.value == need_result
+
+ with DockerEnv() as env:
+ adder = TestMessageAdder(port=cfg.docker_app_port)
+ result = loop.run_until_complete(adder.aio_call(x))
+ adder.rm_container()
+ assert result.get("value") == need_result
+
+ def test_two_sync_executors(self):
+ init_value = "TEST_container_"
+ x = StringValue(value=init_value)
+ need_result = f'{init_value}TESTTEST'
+
+ with CurrentEnv() as env:
+ adder_1 = TestMessageAdder(port=cfg.docker_app_port)
+ adder_2 = TestMessageAdder(port=cfg.docker_app_port+1)
+ result = adder_1(x)
+ result = adder_2(result)
+
+ assert result.value == need_result
+
+ # with DockerEnv() as env:
+ # adder_1 = TestMessageAdder(port=cfg.docker_app_port+10)
+ # adder_2 = TestMessageAdder(port=cfg.docker_app_port+11)
+ # result = adder_1(x)
+ # result = adder_2(result)
+ #
+ # assert result.value == need_result
diff --git a/tests/container_test.py b/tests/container_test.py
deleted file mode 100644
index bd392cd..0000000
--- a/tests/container_test.py
+++ /dev/null
@@ -1,26 +0,0 @@
-import docker
-import os
-import sys
-import asyncio
-root_dir = os.path.join(os.path.realpath(os.path.pardir), 'piper')
-sys.path.insert(1, root_dir)
-
-from piper.utils import docker_utils as du
-from piper.envs import CurrentEnv
-from piper.envs import is_docker_env
-from piper.configurations import get_configuration
-from piper.base.executors import copy_piper, copy_scripts
-from piper.services import TestMessageAdder, StringValue
-
-def test_start_container():
- cfg = get_configuration()
- loop = asyncio.get_event_loop()
- init_value = "TEST_container_"
- x = StringValue(value=init_value)
- need_result = f'{init_value}TEST'
- adder = TestMessageAdder(appender="!", port=cfg.docker_app_port)
- result = loop.run_until_complete(adder(x))
- print(result)
- adder.rm_container()
-
- assert result.get('value') == need_result
diff --git a/tests/domains/FaceDetection/face_detection_test.py b/tests/domains/FaceDetection/face_detection_test.py
new file mode 100644
index 0000000..d3e1a3f
--- /dev/null
+++ b/tests/domains/FaceDetection/face_detection_test.py
@@ -0,0 +1,103 @@
+# pytest -vs tests/domains/FaceDetection/face_detection_test.py::TestFaceDetection::test_detection
+# pytest -vs tests/domains/FaceDetection/face_detection_test.py::TestFaceDetection::test_health_check
+import os
+import sys
+import requests
+root_dir = os.path.join(os.path.realpath(os.path.pardir), 'piper')
+sys.path.insert(1, root_dir)
+
+from pathlib import Path
+import os
+import pytest
+from loguru import logger
+import cv2
+import base64
+import numpy as np
+import json
+
+main_app_url = f'http://localhost:8788'
+file_path = Path(__file__).parent
+
+def send_file_to_service(url, file_path):
+ '''
+ Send file by path to service URL
+ '''
+ multipart_form_data = {
+ 'file': open(file_path, 'rb')
+ }
+
+ logger.info(f'url: {url}')
+ logger.info(f'data: {multipart_form_data}')
+
+ try:
+ result = requests.post(url, files=multipart_form_data, verify=False)
+ return result
+
+ except requests.exceptions.ConnectionError as ce:
+        logger.error(f'exception while connecting to {url}')
+ logger.error(ce)
+
+def draw_bb_on_initial_img(img, detections):
+ '''
+ Draw founded bounded boxes on initial image
+ '''
+ for detect_dict in detections:
+ bbox = detect_dict.get('box')
+ x, y, w, h = bbox
+ cv2.rectangle(img, (x, y), (x+w, y+h), (255,0,0), 2)
+ return img
+
+
+class TestFaceDetection():
+ '''
+ Docker container API test. Methods:
+ test_detection
+ health_check
+ '''
+
+ def test_detection(self):
+ '''
+ jpg file recognize test
+ '''
+ fn = file_path.joinpath('faces.jpg')
+ # fn = file_path.joinpath('ocr_data.jpg')
+ url = f'{main_app_url}/recognize'
+
+ received_data = send_file_to_service(url, fn)
+
+ assert received_data.status_code == 200, received_data.text
+
+ try:
+ detections = received_data.json()
+ logger.info(f'received_data.json {detections}')
+
+ logger.info('data', detections)
+ assert len(detections) != 0
+
+ if detections:
+ initial_img = cv2.imread(str(fn))
+ img_with_faces = draw_bb_on_initial_img(initial_img, detections)
+
+ out_img_fn = str(file_path.joinpath('result_image.jpg'))
+ write_result = cv2.imwrite(out_img_fn, img_with_faces)
+ if write_result:
+ logger.info(f'img with detections saved to {out_img_fn}')
+ else:
+                    logger.error(f'img with detections was not saved')
+
+
+ except Exception as e:
+ pytest.raises(Exception)
+ assert False
+
+
+ def test_health_check(self):
+ '''
+ health check test
+ '''
+ url = f'{main_app_url}/health_check'
+ print(url)
+        # TODO: remove these request parameters; health_check should not need a body
+ result = requests.post(url, data=json.dumps({"1":"2"}), headers= {'Content-Type': 'application/json'})
+ logger.info('health_check test')
+ assert result.status_code == 200
diff --git a/tests/domains/FaceDetection/faces.jpg b/tests/domains/FaceDetection/faces.jpg
new file mode 100755
index 0000000..5d87e2a
Binary files /dev/null and b/tests/domains/FaceDetection/faces.jpg differ
diff --git a/tests/domains/FaceDetection/result_image.jpg b/tests/domains/FaceDetection/result_image.jpg
new file mode 100644
index 0000000..be10642
Binary files /dev/null and b/tests/domains/FaceDetection/result_image.jpg differ
diff --git a/tests/ocr_data.jpg b/tests/domains/TesseractOCR/data/ocr_data.jpg
old mode 100644
new mode 100755
similarity index 100%
rename from tests/ocr_data.jpg
rename to tests/domains/TesseractOCR/data/ocr_data.jpg
diff --git a/tests/domains/TesseractOCR/data/ocr_data.pdf b/tests/domains/TesseractOCR/data/ocr_data.pdf
new file mode 100755
index 0000000..fc17c9a
Binary files /dev/null and b/tests/domains/TesseractOCR/data/ocr_data.pdf differ
diff --git a/tests/ocr_ner.png b/tests/domains/TesseractOCR/data/ocr_ner.png
old mode 100644
new mode 100755
similarity index 100%
rename from tests/ocr_ner.png
rename to tests/domains/TesseractOCR/data/ocr_ner.png
diff --git a/tests/domains/TesseractOCR/tesseract_ocr_test.py b/tests/domains/TesseractOCR/tesseract_ocr_test.py
new file mode 100644
index 0000000..656b272
--- /dev/null
+++ b/tests/domains/TesseractOCR/tesseract_ocr_test.py
@@ -0,0 +1,80 @@
+# pytest -vs tests/domains/TesseractOCR/tesseract_ocr_test.py::TestTesseract
+# pytest -vs tests/domains/TesseractOCR/tesseract_ocr_test.py::TestTesseract::test_recognizer_jpg
+
+import os
+import sys
+import requests
+root_dir = os.path.join(os.path.realpath(os.path.pardir), 'piper')
+sys.path.insert(1, root_dir)
+
+from piper.utils import docker_utils as du
+from piper.utils.TesseractOCR import tesseract_ocr as tu
+
+from piper.envs import DockerEnv
+from piper.envs import is_docker_env
+from piper.configurations import get_configuration
+from piper.services import TesseractRecognizer, StringValue
+from pathlib import Path
+import os
+import pytest
+
+
+main_app_url = f'http://localhost:8788'
+file_path = Path(__file__).parent
+data_path = file_path.joinpath('data')
+
+class TestTesseract():
+ '''
+ Docker container API test. Methods:
+ test_recognizer_jpg
+ test_recognizer_pdf
+ health_check
+ '''
+
+ def test_recognizer_jpg(self):
+ '''
+ jpg file recognize test
+ '''
+ fn = data_path.joinpath('ocr_data.jpg')
+ url = f'{main_app_url}/recognize'
+
+ result = tu.send_file_to_ocr_service(url, fn)
+ print('result.status_code', result.status_code)
+ # assert result is None
+
+ assert result.status_code == 200
+ try:
+ data = result.json()
+ print('data', " ".join([x for x in data.get('text') if x]))
+ assert len(data) != 0
+ except Exception as e:
+ pytest.raises(Exception)
+
+
+ def test_recognizer_pdf(self):
+ '''
+ pdf file recognize test
+ '''
+ fn = data_path.joinpath('ocr_data.pdf')
+ url = f'{main_app_url}/recognize'
+
+ result = tu.send_file_to_ocr_service(url, fn)
+ print('result.status_code', result.status_code)
+ assert result.status_code == 200
+ try:
+ data = result.json()
+ print('data: ', " ".join([x for x in data.get('text') if x]))
+ assert len(data) != 0
+ except Exception as e:
+ pytest.raises(Exception)
+
+
+ def test_health_check(self):
+ '''
+ health check test
+ '''
+ url = f'{main_app_url}/health_check'
+ print(url)
+ result = requests.post(url)
+ print('result.status_code', result.status_code)
+ assert result.status_code == 200
diff --git a/tests/envs_test.py b/tests/envs_test.py
new file mode 100644
index 0000000..1ca9658
--- /dev/null
+++ b/tests/envs_test.py
@@ -0,0 +1,68 @@
+# pytest -vs tests/envs_test.py::TestDocker
+import time
+import requests
+
+from piper.envs import ComposeEnv, VirtualEnv
+
+main_app_url = f'http://localhost:8788'
+
+
+class TestDocker:
+ """
+ Docker container API test. Methods:
+ health_check
+ run
+ """
+
+ def test_health_check(self):
+ url = f'{main_app_url}/health_check'
+ print(url)
+ result = requests.post(url)
+ assert result.status_code == 200
+
+ def test_run(self):
+ url = f'{main_app_url}/run'
+ print(url)
+ data = {'value': 'hello, world'}
+ response = requests.post(url, json=data)
+ result = dict(response.json())
+ need_result = f"{data.get('value')}TEST"
+ print(f'need_result is {need_result}')
+ print(f"docker result is {result.get('value')}")
+
+ assert response.status_code == 200
+ assert need_result == result.get('value')
+
+
+class TestVenv:
+ """
+ venv test. Methods:
+ scenario
+ """
+
+ def test_scenario(self):
+ with VirtualEnv() as env:
+ env.copy_struct_project()
+ env.create_files_for_venv()
+ env.create_files_for_tests()
+
+
+class TestCompose:
+ """
+ Compose test. Methods:
+ scenario
+ """
+
+ def test_health_check(self):
+ with ComposeEnv() as compose:
+ compose.copy_struct_project()
+ compose.create_files_for_compose()
+ compose.start_compose()
+
+ time.sleep(5)
+ url = 'http://127.0.0.1:7585/health_check'
+ result = requests.get(url)
+ assert result.status_code == 200
+
+ compose.stop_compose()
+
diff --git a/tests/import_test.py b/tests/import_test.py
new file mode 100644
index 0000000..c782663
--- /dev/null
+++ b/tests/import_test.py
@@ -0,0 +1,62 @@
+import pytest
+
+
+class TestSafeImport:
+
+ def test_safe_import_after_piper(self):
+ import piper
+ import foo
+ from piper.imports import PiperDummyModule
+ assert isinstance(foo, (PiperDummyModule, ))
+
+ def test_safe_import_before_piper(self):
+ # it doesn't work normally
+ with pytest.raises(ImportError):
+ import foo
+ import piper
+
+ def test_safe_import_after_executors_import(self):
+ from piper.base import executors
+ import foo
+ from piper.imports import PiperDummyModule
+ assert isinstance(foo, (PiperDummyModule, ))
+
+ def test_safe_import_after_base_executor(self):
+ from piper.base.executors import BaseExecutor
+ import foo
+ from piper.imports import PiperDummyModule
+ assert isinstance(foo, (PiperDummyModule, ))
+
+ def test_safe_import_after_piper_as_p(self):
+ import piper as p
+ import foo
+ from piper.imports import PiperDummyModule
+ assert isinstance(foo, (PiperDummyModule,))
+
+ def test_not_ignore_error_by_flag(self):
+ import piper as p
+ p.configurations.Configuration.ignore_import_errors = False
+ with pytest.raises(ImportError):
+ import foo
+
+ def test_safe_import_as_context_when_global_off(self):
+ import piper as p
+ p.configurations.Configuration.ignore_import_errors = False
+ with p.imports.safe_import():
+ import foo
+ from piper.imports import PiperDummyModule
+ assert isinstance(foo, (PiperDummyModule,))
+ # safe import was deactivated
+ with pytest.raises(ImportError):
+ import bar
+
+ def test_safe_import_as_context_when_global_on(self):
+ import piper as p
+ p.configurations.Configuration.ignore_import_errors = True
+ with p.imports.safe_import():
+ import foo
+ from piper.imports import PiperDummyModule
+ assert isinstance(foo, (PiperDummyModule,))
+ # safe import was deactivated but global is on
+ import bar
+ assert isinstance(bar, (PiperDummyModule,))
\ No newline at end of file
diff --git a/tests/ocr/ocr_data.jpg b/tests/ocr/ocr_data.jpg
new file mode 100644
index 0000000..a585891
Binary files /dev/null and b/tests/ocr/ocr_data.jpg differ
diff --git a/tests/ocr_data.pdf b/tests/ocr/ocr_data.pdf
similarity index 100%
rename from tests/ocr_data.pdf
rename to tests/ocr/ocr_data.pdf
diff --git a/tests/ocr/ocr_ner.png b/tests/ocr/ocr_ner.png
new file mode 100644
index 0000000..cbe6fa7
Binary files /dev/null and b/tests/ocr/ocr_ner.png differ
diff --git a/tests/tsrct_test.py b/tests/ocr/tsrct_test.py
similarity index 79%
rename from tests/tsrct_test.py
rename to tests/ocr/tsrct_test.py
index 3e55857..9254634 100644
--- a/tests/tsrct_test.py
+++ b/tests/ocr/tsrct_test.py
@@ -1,77 +1,77 @@
-import os
-import sys
-import asyncio
-import requests
-root_dir = os.path.join(os.path.realpath(os.path.pardir), 'piper')
-sys.path.insert(1, root_dir)
-
-from piper.utils import docker_utils as du
-from piper.utils import tesrct_utils as tu
-
-from piper.envs import DockerEnv
-from piper.envs import is_docker_env
-from piper.configurations import get_configuration
-from piper.services import TesseractRecognizer, StringValue
-from pathlib import Path
-import os
-import pytest
-
-
-main_app_url = f'http://localhost:8788'
-file_path = Path(__file__).parent
-
-# pytest -vs tests/tsrct_test.py::TestTesseract::test_recognizer
-class TestTesseract():
- '''
- Docker container API test. Methods:
- test_recognizer_jpg
- test_recognizer_pdf
- health_check
- '''
-
- def test_recognizer_jpg(self):
- '''
- jpg file recognize test
- '''
- fn = file_path.joinpath('ocr_data.jpg')
- url = f'{main_app_url}/recognize'
-
- result = tu.send_file_to_service(url, fn)
- print('result.status_code', result.status_code)
- # assert result is None
-
- assert result.status_code == 200
- try:
- data = result.json()
- print('data', data)
- assert len(data) != 0
- except Exception as e:
- pytest.raises(Exception)
-
-
- def test_recognizer_pdf(self):
- '''
- pdf file recognize test
- '''
- fn = file_path.joinpath('ocr_data.pdf')
- url = f'{main_app_url}/recognize'
-
- result = tu.send_file_to_service(url, fn)
- print(result.status_code)
- assert result.status_code == 200
- try:
- data = result.json()
- print('data', data)
- assert len(data) != 0
- except Exception as e:
- pytest.raises(Exception)
-
-
- def test_health_check(self):
- '''
- health check test
- '''
- url = f'{main_app_url}/health_check'
- print(url)
- result = requests.post(url)
- assert result.status_code == 200
+import asyncio
+import os
+import sys
+
+import requests
+
+root_dir = os.path.join(os.path.realpath(os.path.pardir), '../../piper')
+sys.path.insert(1, root_dir)
+
+import os
+from pathlib import Path
+
+import pytest
+
+from piper.configurations import get_configuration
+from piper.envs import DockerEnv, is_docker_env
+from piper.services import StringValue, TesseractRecognizer
+from piper.utils import docker_utils as du
+from piper.utils import tesrct_utils as tu
+
+main_app_url = f'http://localhost:8788'
+file_path = Path(__file__).parent
+
+
+# pytest -vs tests/tsrct_test.py::TestTesseract::test_recognizer
+class TestTesseract:
+ """
+ Docker container API test. Methods:
+ test_recognizer_jpg
+ test_recognizer_pdf
+ health_check
+ """
+
+ def test_recognizer_jpg(self):
+ """
+ jpg file recognize test
+ """
+ fn = file_path.joinpath('ocr_data.jpg')
+ url = f'{main_app_url}/recognize'
+
+ result = tu.send_file_to_service(url, fn)
+ print('result.status_code', result.status_code)
+ # assert result is None
+
+ assert result.status_code == 200
+ try:
+ data = result.json()
+ print('data', data)
+ assert len(data) != 0
+ except Exception as e:
+ pytest.raises(Exception)
+
+ def test_recognizer_pdf(self):
+ """
+ pdf file recognize test
+ """
+ fn = file_path.joinpath('ocr_data.pdf')
+ url = f'{main_app_url}/recognize'
+
+ result = tu.send_file_to_service(url, fn)
+ print(result.status_code)
+ assert result.status_code == 200
+ try:
+ data = result.json()
+ print('data', data)
+ assert len(data) != 0
+ except Exception as e:
+ pytest.raises(Exception)
+
+ def test_health_check(self):
+ """
+ health check test
+ """
+ url = f'{main_app_url}/health_check'
+ print(url)
+ result = requests.post(url)
+ assert result.status_code == 200
diff --git a/tests/running_piper_test.py b/tests/running_piper_test.py
deleted file mode 100644
index 0e6af40..0000000
--- a/tests/running_piper_test.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# pytest -vs tests/running_piper_test.py::TestDocker
-import requests
-
-main_app_url = f'http://localhost:8788'
-
-class TestDocker():
- '''
- Docker container API test. Methods:
- health_check
- run
- '''
- def test_health_check(self):
- url = f'{main_app_url}/health_check'
- print(url)
- result = requests.post(url)
- assert result.status_code == 200
-
- def test_run(self):
- url = f'{main_app_url}/run'
- print(url)
- data = {'value': 'hello, world'}
- response = requests.post(url, json=data)
- result = dict(response.json())
- need_result = f"{data.get('value')}TEST"
- print(f'need_result is {need_result}')
- print(f"docker result is {result.get('value')}")
-
- assert response.status_code == 200
- assert need_result == result.get('value')
-
-
-class TestVenv():
- '''
- venv container API test. Methods:
- dummy
- '''
- def test_dummy(self):
- assert 1 / 0
\ No newline at end of file
diff --git a/tests/use_case_folder_processing.py b/tests/use_case_folder_processing.py
deleted file mode 100644
index 136a5f3..0000000
--- a/tests/use_case_folder_processing.py
+++ /dev/null
@@ -1,85 +0,0 @@
-import os
-import sys
-import asyncio
-import requests
-root_dir = os.path.join(os.path.realpath(os.path.pardir), 'piper')
-sys.path.insert(1, root_dir)
-
-from piper.utils import docker_utils as du
-from piper.utils import tesrct_utils as tu
-
-from piper.envs import DockerEnv
-from piper.envs import is_docker_env
-from piper.configurations import get_configuration
-from piper.services import TesseractRecognizer, StringValue
-from pathlib import Path
-import os
-import pytest
-import json
-
-from PiperOperator import *
-
-# service url
-main_app_url = f'http://localhost:8788'
-
-# folder info
-file_path = Path(__file__).parent
-# fn = file_path.joinpath('ocr_data.jpg')
-
-SOURCE_FOLDER = file_path
-OUTPUT_FOLDER = file_path.joinpath('out')
-
-
-if __name__ == '__main__':
- cfg = get_configuration()
-
- # PiperWorker instanse
- piper_worker = PiperNLPWorker(main_app_url)
-
- available_models = piper_worker.get_available_ner_models()
- logger.info(f'available models are {available_models}')
-
- ts_conf = dict()
- ts_conf['ts_lang'] = 'eng'
-
- for oem in cfg.available_OEM:
- for psm in cfg.available_PSM:
- # change tesseract config
- ts_conf['ts_config_row'] = rf'--oem {oem} --psm {psm}'
- piper_worker.set_tesseract_config(ts_conf)
- logger.info(f"\ttesseract config changed to {ts_conf['ts_config_row']}")
-
- for model in available_models:
- # change spacy model
- piper_worker.set_current_spacy_model(model)
- logger.info(f"\t\tspacy model changed to {model}")
-
- # create output folder
- cur_dir = OUTPUT_FOLDER.joinpath(f'oem_{oem}_psm_{psm}_{model}')
- cur_dir.mkdir(parents=True, exist_ok=True)
- for fn in file_path.iterdir():
- if fn.suffix[1:] in ['jpg', 'jpeg', 'png', 'pdf']:
- # folder processing
- txt = piper_worker.get_text_from_file(fn)
- if txt:
- txt = ' '.join(txt)
- logger.info(f'\t\t\textracted text {txt}')
-
- out_fn = cur_dir.joinpath(f'res_{fn.stem}.txt')
- with open(out_fn, 'w') as f:
- f.write(txt)
-
- logger.debug(f'get NEs from text: {txt}')
- named_ents = piper_worker.get_named_ent_from_text(txt)
- logger.debug(f'NEs are: {named_ents}')
-
- if named_ents:
- named_ents_str = "\n".join(f'\t\t\t{x}' for x in named_ents)
- logger.info(f'\t\t\textract_named_ents {named_ents_str}')
-
- # out_fn = cur_dir.joinpath(f'res_{fn.stem}.txt')
- with open(out_fn, 'a') as f:
- f.write('\n')
- f.write(named_ents_str)
- else:
- logger.info(f'\t\t\tNO extracted text')
diff --git a/usecases/OCR.ipynb b/usecases/OCR.ipynb
new file mode 100644
index 0000000..abae45c
--- /dev/null
+++ b/usecases/OCR.ipynb
@@ -0,0 +1,236 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "id": "f2ba6da6",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# TODO: install from pypi or nexus\n",
+ "!cp -R ../piper/ ./piper/"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "id": "afd5d0d6",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Collecting docker\n",
+ " Downloading docker-5.0.3-py2.py3-none-any.whl (146 kB)\n",
+ " |████████████████████████████████| 146 kB 171 kB/s \n",
+ "\u001b[?25hCollecting aiohttp\n",
+ " Downloading aiohttp-3.8.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl (1.2 MB)\n",
+ " |████████████████████████████████| 1.2 MB 399 kB/s \n",
+ "\u001b[?25hRequirement already satisfied: Jinja2 in /opt/conda/lib/python3.9/site-packages (from -r ../requirements.txt (line 3)) (3.0.3)\n",
+ "Collecting pydantic\n",
+ " Downloading pydantic-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (12.2 MB)\n",
+ " |████████████████████████████████| 12.2 MB 1.7 MB/s \n",
+ "\u001b[?25hCollecting loguru\n",
+ " Downloading loguru-0.6.0-py3-none-any.whl (58 kB)\n",
+ " |████████████████████████████████| 58 kB 2.5 MB/s \n",
+ "\u001b[?25hRequirement already satisfied: websocket-client>=0.32.0 in /opt/conda/lib/python3.9/site-packages (from docker->-r ../requirements.txt (line 1)) (1.2.3)\n",
+ "Requirement already satisfied: requests!=2.18.0,>=2.14.2 in /opt/conda/lib/python3.9/site-packages (from docker->-r ../requirements.txt (line 1)) (2.27.1)\n",
+ "Collecting aiosignal>=1.1.2\n",
+ " Downloading aiosignal-1.2.0-py3-none-any.whl (8.2 kB)\n",
+ "Collecting yarl<2.0,>=1.0\n",
+ " Downloading yarl-1.7.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl (304 kB)\n",
+ " |████████████████████████████████| 304 kB 2.8 MB/s \n",
+ "\u001b[?25hRequirement already satisfied: charset-normalizer<3.0,>=2.0 in /opt/conda/lib/python3.9/site-packages (from aiohttp->-r ../requirements.txt (line 2)) (2.0.10)\n",
+ "Collecting multidict<7.0,>=4.5\n",
+ " Downloading multidict-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (114 kB)\n",
+ " |████████████████████████████████| 114 kB 3.3 MB/s \n",
+ "\u001b[?25hRequirement already satisfied: attrs>=17.3.0 in /opt/conda/lib/python3.9/site-packages (from aiohttp->-r ../requirements.txt (line 2)) (21.4.0)\n",
+ "Collecting frozenlist>=1.1.1\n",
+ " Downloading frozenlist-1.3.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (156 kB)\n",
+ " |████████████████████████████████| 156 kB 2.4 MB/s \n",
+ "\u001b[?25hCollecting async-timeout<5.0,>=4.0.0a3\n",
+ " Downloading async_timeout-4.0.2-py3-none-any.whl (5.8 kB)\n",
+ "Requirement already satisfied: MarkupSafe>=2.0 in /opt/conda/lib/python3.9/site-packages (from Jinja2->-r ../requirements.txt (line 3)) (2.0.1)\n",
+ "Requirement already satisfied: typing-extensions>=3.7.4.3 in /opt/conda/lib/python3.9/site-packages (from pydantic->-r ../requirements.txt (line 4)) (4.0.1)\n",
+ "Requirement already satisfied: certifi>=2017.4.17 in /opt/conda/lib/python3.9/site-packages (from requests!=2.18.0,>=2.14.2->docker->-r ../requirements.txt (line 1)) (2021.10.8)\n",
+ "Requirement already satisfied: idna<4,>=2.5 in /opt/conda/lib/python3.9/site-packages (from requests!=2.18.0,>=2.14.2->docker->-r ../requirements.txt (line 1)) (3.3)\n",
+ "Requirement already satisfied: urllib3<1.27,>=1.21.1 in /opt/conda/lib/python3.9/site-packages (from requests!=2.18.0,>=2.14.2->docker->-r ../requirements.txt (line 1)) (1.26.8)\n",
+ "Installing collected packages: multidict, frozenlist, yarl, async-timeout, aiosignal, pydantic, loguru, docker, aiohttp\n",
+ "Successfully installed aiohttp-3.8.1 aiosignal-1.2.0 async-timeout-4.0.2 docker-5.0.3 frozenlist-1.3.0 loguru-0.6.0 multidict-6.0.2 pydantic-1.9.0 yarl-1.7.2\n"
+ ]
+ }
+ ],
+ "source": [
+ "!pip install -r ../requirements.txt"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "id": "223467c6",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Collecting fastapi\n",
+ " Downloading fastapi-0.74.1-py3-none-any.whl (53 kB)\n",
+ " |████████████████████████████████| 53 kB 726 kB/s \n",
+ "\u001b[?25hCollecting starlette==0.17.1\n",
+ " Downloading starlette-0.17.1-py3-none-any.whl (58 kB)\n",
+ " |████████████████████████████████| 58 kB 2.0 MB/s \n",
+ "\u001b[?25hRequirement already satisfied: pydantic!=1.7,!=1.7.1,!=1.7.2,!=1.7.3,!=1.8,!=1.8.1,<2.0.0,>=1.6.2 in /opt/conda/lib/python3.9/site-packages (from fastapi) (1.9.0)\n",
+ "Requirement already satisfied: anyio<4,>=3.0.0 in /opt/conda/lib/python3.9/site-packages (from starlette==0.17.1->fastapi) (3.5.0)\n",
+ "Requirement already satisfied: typing-extensions>=3.7.4.3 in /opt/conda/lib/python3.9/site-packages (from pydantic!=1.7,!=1.7.1,!=1.7.2,!=1.7.3,!=1.8,!=1.8.1,<2.0.0,>=1.6.2->fastapi) (4.0.1)\n",
+ "Requirement already satisfied: sniffio>=1.1 in /opt/conda/lib/python3.9/site-packages (from anyio<4,>=3.0.0->starlette==0.17.1->fastapi) (1.2.0)\n",
+ "Requirement already satisfied: idna>=2.8 in /opt/conda/lib/python3.9/site-packages (from anyio<4,>=3.0.0->starlette==0.17.1->fastapi) (3.3)\n",
+ "Installing collected packages: starlette, fastapi\n",
+ "Successfully installed fastapi-0.74.1 starlette-0.17.1\n"
+ ]
+ }
+ ],
+ "source": [
+ "!pip install fastapi"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 12,
+ "id": "313e9b33",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Requirement already satisfied: opencv-python in /opt/conda/lib/python3.9/site-packages (4.5.5.62)\n",
+ "Collecting pytesseract\n",
+ " Downloading pytesseract-0.3.9-py2.py3-none-any.whl (14 kB)\n",
+ "Requirement already satisfied: numpy>=1.19.3 in /opt/conda/lib/python3.9/site-packages (from opencv-python) (1.21.5)\n",
+ "Requirement already satisfied: packaging>=21.3 in /opt/conda/lib/python3.9/site-packages (from pytesseract) (21.3)\n",
+ "Requirement already satisfied: Pillow>=8.0.0 in /opt/conda/lib/python3.9/site-packages (from pytesseract) (8.4.0)\n",
+ "Requirement already satisfied: pyparsing!=3.0.5,>=2.0.2 in /opt/conda/lib/python3.9/site-packages (from packaging>=21.3->pytesseract) (3.0.6)\n",
+ "Installing collected packages: pytesseract\n",
+ "Successfully installed pytesseract-0.3.9\n"
+ ]
+ }
+ ],
+ "source": [
+ "!pip install opencv-python pytesseract"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 18,
+ "id": "4261648a",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from piper.services import TestMessageAdder, StringValue, TesseractRecognizer\n",
+ "from piper.envs import CurrentEnv, DockerEnv\n",
+ "from piper.configurations import get_configuration\n",
+ "import time\n",
+ "import asyncio"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 19,
+ "id": "0e62bf01",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Entering CurrentEnv\n",
+ "Setting environment to: \n",
+ "get_env() \n",
+ "is_current_env() True\n",
+ "value='hello, world!'\n",
+ "Exiting CurrentEnv\n",
+ "Setting environment to: \n"
+ ]
+ }
+ ],
+ "source": [
+ "cfg = get_configuration()\n",
+ "loop = asyncio.get_event_loop()\n",
+ "with CurrentEnv() as env:\n",
+ " x = StringValue(value=\"hello, world\")\n",
+ " adder = TestMessageAdder(appender=\"!\", port=cfg.docker_app_port)\n",
+ " result = await adder(x)\n",
+ " print(result)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 23,
+ "id": "80499ace",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Entering CurrentEnv\n",
+ "Setting environment to: \n",
+ "get_env() \n",
+ "is_current_env() True\n",
+ "1\n",
+ "{1: 1}\n",
+ "Exiting CurrentEnv\n",
+ "Setting environment to: \n"
+ ]
+ },
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "/tmp/ipykernel_29/3635331722.py:3: RuntimeWarning: coroutine 'HTTPExecutor.__call__' was never awaited\n",
+ " result = await recognizer(\"../tests/ocr_data.jpg\")\n",
+ "RuntimeWarning: Enable tracemalloc to get the object allocation traceback\n"
+ ]
+ }
+ ],
+ "source": [
+ "# TODO: automate environment\n",
+ "with CurrentEnv() as env:\n",
+ " recognizer = TesseractRecognizer(port=cfg.docker_app_port)\n",
+ " result = await recognizer(\"../tests/ocr_data.jpg\")\n",
+ " print(result)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "34c7db2b",
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.9.7"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}