diff --git a/.github/workflows/rw_get_tests.yaml b/.github/workflows/rw_get_tests.yaml index 8ca9579a..90e4cec6 100644 --- a/.github/workflows/rw_get_tests.yaml +++ b/.github/workflows/rw_get_tests.yaml @@ -56,6 +56,7 @@ jobs: - name: Download shell script for getting path of all test modules if: ${{ inputs.use_customized_shell != true }} + working-directory: ${{ inputs.test_working_directory }} run: curl https://raw.githubusercontent.com/Chisanan232/GitHub-Action_Reusable_Workflows-Python/develop/scripts/ci/get-all-tests.sh --output ${{ inputs.shell_path }} - id: set-matrix diff --git a/.github/workflows/rw_organize_test_cov_reports.yaml b/.github/workflows/rw_organize_test_cov_reports.yaml index 31aa7292..7f08e7a9 100644 --- a/.github/workflows/rw_organize_test_cov_reports.yaml +++ b/.github/workflows/rw_organize_test_cov_reports.yaml @@ -5,6 +5,7 @@ # # Workflow input parameters: # * test_type: The testing type. In generally, it only has 2 options: 'unit-test' and 'integration-test'. +# * test_working_directory: The working directory for test running. # # Workflow running output: # No, but it would save the testing coverage reports (coverage.xml) to provide after-process to organize and record. @@ -24,6 +25,11 @@ on: description: "The testing type. In generally, it only has 2 options: 'unit-test' and 'integration-test'." type: string required: true + test_working_directory: + description: "The working directory for test running." 
+ required: false + type: string + default: './' jobs: @@ -37,7 +43,7 @@ jobs: uses: actions/download-artifact@v4 with: pattern: coverage* - path: ./ + path: ${{ inputs.test_working_directory }} merge-multiple: true - name: Setup Python 3.10 in Ubuntu OS @@ -46,6 +52,7 @@ jobs: python-version: '3.10' - name: Install Python tool 'coverage' + working-directory: ${{ inputs.test_working_directory }} run: | python3 -m pip install --upgrade pip pip3 install -U pip @@ -54,6 +61,7 @@ jobs: ls -la - name: Combine all testing coverage data files with test type and runtime OS, and convert to XML format file finally + working-directory: ${{ inputs.test_working_directory }} run: | curl https://raw.githubusercontent.com/Chisanan232/GitHub-Action_Reusable_Workflows-Python/develop/scripts/ci/combine_coverage_reports.sh --output ./scripts/ci/combine_coverage_reports.sh bash ./scripts/ci/combine_coverage_reports.sh ${{ inputs.test_type }} .coverage. @@ -62,7 +70,7 @@ jobs: uses: actions/upload-artifact@v4 with: name: ${{ inputs.test_type }}_coverage_data_file - path: .coverage + path: ${{ inputs.test_working_directory }}.coverage if-no-files-found: error include-hidden-files: true @@ -70,6 +78,6 @@ jobs: uses: actions/upload-artifact@v4 with: name: ${{ inputs.test_type }}_coverage_xml_report - path: coverage**xml + path: ${{ inputs.test_working_directory }}coverage**xml if-no-files-found: error include-hidden-files: true diff --git a/.github/workflows/rw_poetry_run_test.yaml b/.github/workflows/rw_poetry_run_test.yaml index afae7739..04505c57 100644 --- a/.github/workflows/rw_poetry_run_test.yaml +++ b/.github/workflows/rw_poetry_run_test.yaml @@ -149,6 +149,7 @@ jobs: - name: Setup and run HTTP server for testing if: ${{ inputs.setup_http_server == true }} + working-directory: ${{ inputs.test_working_directory }} run: gunicorn --bind ${{ inputs.http_server_host }}:${{ inputs.http_server_port }} '${{ inputs.http_server_app_module }}:${{ inputs.http_server_enter_point }}' --daemon - 
name: Test to send HTTP request to sample HTTP server @@ -169,6 +170,6 @@ jobs: uses: actions/upload-artifact@v4 with: name: coverage_${{ inputs.test_type }}_${{ inputs.runtime_os }}_${{ inputs.python_version }} - path: .coverage.${{ inputs.test_type }}.${{ inputs.runtime_os }}-${{ inputs.python_version }} + path: ${{ inputs.test_working_directory }}.coverage.${{ inputs.test_type }}.${{ inputs.runtime_os }}-${{ inputs.python_version }} if-no-files-found: error include-hidden-files: true diff --git a/.github/workflows/rw_poetry_run_test_with_multi_py_versions.yaml b/.github/workflows/rw_poetry_run_test_with_multi_py_versions.yaml index fe628441..38ba9db1 100644 --- a/.github/workflows/rw_poetry_run_test_with_multi_py_versions.yaml +++ b/.github/workflows/rw_poetry_run_test_with_multi_py_versions.yaml @@ -6,6 +6,7 @@ # # Workflow input parameters: # * test_type: The testing type. In generally, it only has 2 options: 'unit-test' and 'integration-test'. +# * test_working_directory: The working directory for test running. # * install_dependency_with_group: Install the dependency by Poetry configuration with dependency group setting. This parameter receive the dependency group naming. # * all_test_items_paths: The target paths of test items under test. # * setup_http_server: If it's true, it would set up and run HTTP server for testing. @@ -34,6 +35,11 @@ on: description: "The testing type. In generally, it only has 2 options: 'unit-test' and 'integration-test'." required: true type: string + test_working_directory: + description: "The working directory for test running." + required: false + type: string + default: './' install_dependency_with_group: description: "Install the dependency by Poetry configuration with dependency group setting. This parameter receive the dependency group naming." 
type: string @@ -86,6 +92,7 @@ jobs: runtime_os: ${{ matrix.os }} python_version: ${{ matrix.python-version }} test_type: ${{ inputs.test_type }} + test_working_directory: ${{ inputs.test_working_directory }} install_dependency_with_group: ${{ inputs.install_dependency_with_group }} all_test_items_paths: ${{ inputs.all_test_items_paths }} setup_http_server: ${{ inputs.setup_http_server }} diff --git a/.github/workflows/rw_upload_test_cov_report.yaml b/.github/workflows/rw_upload_test_cov_report.yaml index 350a1774..d5e7f6f4 100644 --- a/.github/workflows/rw_upload_test_cov_report.yaml +++ b/.github/workflows/rw_upload_test_cov_report.yaml @@ -7,6 +7,7 @@ # * General arguments: # * download_path: The path to download testing coverage reports via 'actions/download-artifact@v3'. # * test_type: The testing type. In generally, it only has 2 options: 'unit-test' and 'integration-test'. +# * test_working_directory: The working directory for test running. # * upload-to-codecov: If it's true, it would upload testing coverage report for Codecov (https://codecov.io). # * codecov_flags: The flags of the testing coverage report for Codecov. This option would be required if 'upload-to-codecov' is true. # * codecov_name: The name of the testing coverage report for Codecov. This option would be required if 'upload-to-codecov' is true. @@ -37,6 +38,11 @@ on: description: "The testing type. In generally, it only has 2 options: 'unit-test' and 'integration-test'." required: true type: string + test_working_directory: + description: "The working directory for test running." + required: false + type: string + default: './' upload-to-codecov: description: "If it's true, it would upload testing coverage report for Codecov (https://codecov.io)." 
type: boolean @@ -156,6 +162,7 @@ jobs: - name: Upload coverage report to Coveralls https://coveralls.io if: ${{ inputs.upload-to-coveralls == true }} + working-directory: ${{ inputs.test_working_directory }} env: GITHUB_TOKEN: ${{ secrets.coveralls_token }} run: coveralls --verbose diff --git a/.github/workflows/test_nested_pyproject_ci_multi-tests_by_poetry.yaml b/.github/workflows/test_nested_pyproject_ci_multi-tests_by_poetry.yaml new file mode 100644 index 00000000..519867c3 --- /dev/null +++ b/.github/workflows/test_nested_pyproject_ci_multi-tests_by_poetry.yaml @@ -0,0 +1,136 @@ +name: Nested Python project with Poetry CI Test (multi-tests) + +on: + pull_request: + branches: + - "develop**" + - "master" + paths: +# The workflow self. + - ".github/workflows/test_nested_pyproject_ci_multi-tests_by_poetry.yaml" +# The shell script only be used by this workflow. + - "nested_poetry_project/scripts/ci/check_getting_output.sh" +# The sample Python code. + - "nested_poetry_project/.coveragerc" + - "nested_poetry_project/pyproject.toml" + - "nested_poetry_project/poetry.lock" + - "nested_poetry_project/pytest.ini" + - "nested_poetry_project/nested_python_src/**" + - "nested_poetry_project/test/**" +# The shell scripts or actions this workflow would use. 
+ - ".github/workflows/rw_get_tests.yaml" + - ".github/workflows/rw_poetry_run_test.yaml" + - ".github/workflows/rw_poetry_run_test_with_multi_py_versions.yaml" + - ".github/workflows/rw_organize_test_cov_reports.yaml" + - "nested_poetry_project/scripts/ci/combine_coverage_reports.sh" + - ".github/workflows/rw_upload_test_cov_report.yaml" + - "nested_poetry_project/scripts/ci/check-input-params.sh" + - ".github/workflows/rw_sonarqube_scan.yaml" + - ".github/workflows/rw_pre-building_test.yaml" + - ".github/workflows/rw_build_git-tag_and_create_github-release.yaml" + - "nested_poetry_project/scripts/ci/build_git-tag_or_create_github-release.sh" + - "nested_poetry_project/scripts/ci/deployment_new_version_workflow.sh" + +jobs: + prep-testbed_unit-test: +# name: Prepare all unit test items + uses: ./.github/workflows/rw_get_tests.yaml + with: + test_working_directory: './nested_poetry_project/' + shell_arg: test/unit_test/ + + + prep-testbed_integration-test: +# name: Prepare all integration test items + uses: ./.github/workflows/rw_get_tests.yaml + with: + shell_path: ./scripts/ci/test/get-integration-test-paths.sh + shell_arg: unix + test_working_directory: './nested_poetry_project/' + use_customized_shell: true + + + run_unit-test: +# name: Run all unit test items + needs: prep-testbed_unit-test + uses: ./.github/workflows/rw_poetry_run_test.yaml + with: + test_type: unit-test + test_working_directory: './nested_poetry_project/' + install_dependency_with_group: github-action + all_test_items_paths: ${{needs.prep-testbed_unit-test.outputs.all_test_items}} + + + run_integration-test: +# name: Run all integration test items. This testing would test the code with other resource or system to ensure the features work finely. 
+ needs: prep-testbed_integration-test + uses: ./.github/workflows/rw_poetry_run_test_with_multi_py_versions.yaml + with: + test_type: integration-test + test_working_directory: './nested_poetry_project/' + install_dependency_with_group: github-action + all_test_items_paths: ${{needs.prep-testbed_integration-test.outputs.all_test_items}} + setup_http_server: true + http_server_host: 0.0.0.0 + http_server_port: 30303 + http_server_app_module: test._http_server.app + http_server_enter_point: app + + + all-test_codecov: +# name: Organize and generate the testing report and upload it to Codecov + if: ${{ !contains(github.event.pull_request.labels.*.name, 'dependencies') }} + needs: [run_unit-test, run_integration-test] + uses: ./.github/workflows/rw_organize_test_cov_reports.yaml + with: + test_type: all-test + test_working_directory: './nested_poetry_project/' + + + codecov_finish: +# name: Organize and generate the testing report and upload it to Codecov +# if: github.ref_name == 'release' || github.ref_name == 'master' + needs: [all-test_codecov] + uses: ./.github/workflows/rw_upload_test_cov_report.yaml + secrets: + codecov_token: ${{ secrets.CODECOV_TOKEN }} + with: + test_type: all-test + upload-to-codecov: true + codecov_flags: unit,integration # Required if 'upload-to-codecov' is true + codecov_name: gh_workflow_template # Required if 'upload-to-codecov' is true + + + coveralls_finish: +# name: Organize and generate the testing report and upload it to Coveralls +# if: github.ref_name == 'release' || github.ref_name == 'master' + needs: [all-test_codecov] + uses: ./.github/workflows/rw_upload_test_cov_report.yaml + secrets: + coveralls_token: ${{ secrets.COVERALLS_TOKEN }} + with: + test_type: all-test + test_working_directory: './nested_poetry_project/' + upload-to-coveralls: true + + + codacy_finish: +# name: Upload test report to Codacy to analyse and record code quality + needs: [all-test_codecov] + uses: ./.github/workflows/rw_upload_test_cov_report.yaml + 
secrets: + codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }} + with: + test_type: all-test + upload-to-codacy: true + + + sonarqube_finish: +# name: Trigger SonarQube service to scan and analyse project + needs: [all-test_codecov] + uses: ./.github/workflows/rw_sonarqube_scan.yaml + secrets: + sonar_token: ${{ secrets.SONAR_TOKEN }} + with: + test_type: all-test +# download_path: ${{ inputs.download_path }} diff --git a/nested_poetry_project/.coveragerc b/nested_poetry_project/.coveragerc new file mode 100644 index 00000000..65d4953a --- /dev/null +++ b/nested_poetry_project/.coveragerc @@ -0,0 +1,7 @@ +[run] +parallel = True +relative_files = True +include = ./nested_python_src/* + +omit = + */__init__.py diff --git a/nested_poetry_project/nested_python_src/__init__.py b/nested_poetry_project/nested_python_src/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/nested_poetry_project/nested_python_src/__pkg_info__.py b/nested_poetry_project/nested_python_src/__pkg_info__.py new file mode 100644 index 00000000..02ca93b3 --- /dev/null +++ b/nested_poetry_project/nested_python_src/__pkg_info__.py @@ -0,0 +1 @@ +__version__ = "0.2.0-alpha1.post1" diff --git a/nested_poetry_project/nested_python_src/sample.py b/nested_poetry_project/nested_python_src/sample.py new file mode 100644 index 00000000..682c3790 --- /dev/null +++ b/nested_poetry_project/nested_python_src/sample.py @@ -0,0 +1,3 @@ + +def hello_python() -> str: + return "Hello Python" diff --git a/nested_poetry_project/poetry.lock b/nested_poetry_project/poetry.lock new file mode 100644 index 00000000..2af32141 --- /dev/null +++ b/nested_poetry_project/poetry.lock @@ -0,0 +1,433 @@ +# This file is automatically @generated by Poetry 2.0.0 and should not be changed by hand. + +[[package]] +name = "certifi" +version = "2022.12.7" +description = "Python package for providing Mozilla's CA Bundle." 
+optional = false +python-versions = ">=3.6" +groups = ["dev"] +files = [ + {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, + {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.1.0" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +groups = ["dev"] +files = [ + {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, + {file = 
"charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, + {file = 
"charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, + {file = 
"charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, + {file = 
"charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = 
"sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, + {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["dev"] +markers = "sys_platform == \"win32\"" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "6.5.0" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, + {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, + {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = 
"sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, + {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, + {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, + {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, + {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, + {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, + {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, + {file = 
"coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, + {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, + {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, + {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, + {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, + {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, + {file = 
"coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, + {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, + {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, + {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, + {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, + 
{file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, + {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, + {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, + {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "coveralls" +version = "3.3.1" +description = "Show coverage stats online via coveralls.io" +optional = false +python-versions = ">= 3.5" +groups = ["dev"] +files = [ + {file = "coveralls-3.3.1-py2.py3-none-any.whl", hash = "sha256:f42015f31d386b351d4226389b387ae173207058832fbf5c8ec4b40e27b16026"}, + {file = "coveralls-3.3.1.tar.gz", hash = "sha256:b32a8bb5d2df585207c119d6c01567b81fba690c9c10a753bfe27a335bfc43ea"}, +] + +[package.dependencies] +coverage = ">=4.1,<6.0.dev0 || >6.1,<6.1.1 || >6.1.1,<7.0" +docopt = ">=0.6.1" +requests = ">=1.0.0" + +[package.extras] +yaml = ["PyYAML (>=3.10)"] + +[[package]] +name = "docopt" +version = "0.6.2" +description = "Pythonic argument parser, that will make you smile" +optional = false +python-versions = "*" +groups = ["dev"] +files = [ + {file = "docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.1.1" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +markers = "python_version < \"3.11\"" +files = [ + {file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"}, + {file 
= "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "gunicorn" +version = "20.1.0" +description = "WSGI HTTP Server for UNIX" +optional = false +python-versions = ">=3.5" +groups = ["github-action"] +files = [ + {file = "gunicorn-20.1.0-py3-none-any.whl", hash = "sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e"}, + {file = "gunicorn-20.1.0.tar.gz", hash = "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"}, +] + +[package.dependencies] +setuptools = ">=3.0" + +[package.extras] +eventlet = ["eventlet (>=0.24.1)"] +gevent = ["gevent (>=1.4.0)"] +setproctitle = ["setproctitle"] +tornado = ["tornado (>=0.2)"] + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +groups = ["dev"] +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "packaging" +version = "23.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, + {file = "packaging-23.1.tar.gz", 
hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, +] + +[[package]] +name = "pluggy" +version = "1.0.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.6" +groups = ["dev"] +files = [ + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pytest" +version = "7.3.0" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "pytest-7.3.0-py3-none-any.whl", hash = "sha256:933051fa1bfbd38a21e73c3960cebdad4cf59483ddba7696c48509727e17f201"}, + {file = "pytest-7.3.0.tar.gz", hash = "sha256:58ecc27ebf0ea643ebfdf7fb1249335da761a00c9f955bcd922349bcb68ee57d"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] + +[[package]] +name = "pytest-cov" +version = "3.0.0" +description = "Pytest plugin for measuring coverage." 
+optional = false +python-versions = ">=3.6" +groups = ["dev"] +files = [ + {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, + {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + +[[package]] +name = "pytest-rerunfailures" +version = "10.3" +description = "pytest plugin to re-run tests to eliminate flaky failures" +optional = false +python-versions = ">=3.6" +groups = ["dev"] +files = [ + {file = "pytest-rerunfailures-10.3.tar.gz", hash = "sha256:d8244d799f89a6edb5e57301ddaeb3b6f10d6691638d51e80b371311592e28c6"}, + {file = "pytest_rerunfailures-10.3-py3-none-any.whl", hash = "sha256:6be6f96510bf94b54198bf15bc5568fe2cdff88e83875912e22d29810acf65ff"}, +] + +[package.dependencies] +packaging = ">=17.1" +pytest = ">=5.3" + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "setuptools" +version = "78.1.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.9" +groups = ["github-action"] +files = [ + {file = "setuptools-78.1.0-py3-none-any.whl", hash = "sha256:3e386e96793c8702ae83d17b853fb93d3e09ef82ec62722e61da5cd22376dcd8"}, + {file = "setuptools-78.1.0.tar.gz", hash = "sha256:18fd474d4a82a5f83dac888df697af65afa82dec7323d09c3e37d1f14288da54"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.8.0)"] +core = ["importlib_metadata (>=6)", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks 
(!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.14.*)", "pytest-mypy"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +markers = "python_full_version <= \"3.11.0a6\"" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "urllib3" +version = "1.26.15" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +groups = ["dev"] +files = [ + {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"}, + {file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[metadata] +lock-version = "2.1" +python-versions = ">=3.9" +content-hash = "889e307ed055d36586b6066665a8ccc12f5c7e320b0122cc3dc29ee8241dc14d" diff --git a/nested_poetry_project/pyproject.toml b/nested_poetry_project/pyproject.toml new file mode 100644 index 00000000..d72fc66a --- /dev/null +++ b/nested_poetry_project/pyproject.toml @@ -0,0 +1,27 @@ +[tool.poetry] +name = "nested_poetry_project_name" +version = "0.1.0" +description = "This is a 
testing package of GitHub Action reusable workflow" +authors = ["Chisanan232 "] +license = "Apache License 2.0" +packages = [{include = "nested_python_src"}] + +[tool.poetry.dependencies] +python = ">=3.9" + +[tool.poetry.group.dev.dependencies] +pytest = "^7.0.0" +pytest-cov = "^3.0.0" +pytest-rerunfailures = "^10.2" +coverage = "^6.2" +coveralls = "^3.3.1" + +[tool.poetry.group.github-action] +optional = true + +[tool.poetry.group.github-action.dependencies] +gunicorn = "^20.1.0" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/nested_poetry_project/pytest.ini b/nested_poetry_project/pytest.ini new file mode 100644 index 00000000..61b70b41 --- /dev/null +++ b/nested_poetry_project/pytest.ini @@ -0,0 +1,14 @@ +# # This test directory for testing to simulate a Python library project structure. + +# pytest.ini +[pytest] +minversion = 0.1.0 + +addopts = + --cov=./nested_python_src + --cov-config=./.coveragerc + -r a + -v + --reruns 3 + +testpaths = diff --git a/nested_poetry_project/scripts/ci/build_git-tag_or_create_github-release.sh b/nested_poetry_project/scripts/ci/build_git-tag_or_create_github-release.sh new file mode 100644 index 00000000..4c3caca1 --- /dev/null +++ b/nested_poetry_project/scripts/ci/build_git-tag_or_create_github-release.sh @@ -0,0 +1,369 @@ +#!/usr/bin/env bash + +#set -ex + +# The file paths for auto-tag (git tag) and auto-release (GitHub release info) +Auto_Tag_And_Release_Dir=.github/tag_and_release +Auto_Tag_And_Release_Flag=release-auto-flag.txt +Auto_Release_Title=release-title.md +Auto_Release_Content=release-notes.md + +# Check whether it has 'release-notes.md' or 'release-title.md' in the target directory '.github'. +has_auto_release_flag=$(ls "$Auto_Tag_And_Release_Dir" | grep -E "$Auto_Tag_And_Release_Flag") +if [ "$has_auto_release_flag" == "" ]; then + echo "โš ๏ธ It should have *$Auto_Tag_And_Release_Flag* in '$Auto_Tag_And_Release_Dir/' directory of your project in HitHub." 
+ exit 0 +else + auto_release_flag=$(cat "$Auto_Tag_And_Release_Dir/$Auto_Tag_And_Release_Flag") + if [ "$auto_release_flag" == false ]; then + echo "๐Ÿ’ค Auto-release flag is 'false' so it won't build git tag or create GitHub release." + exit 0 + fi +fi + +has_release_notes=$(ls "$Auto_Tag_And_Release_Dir" | grep -E "$Auto_Release_Content") +has_release_title=$(ls "$Auto_Tag_And_Release_Dir" | grep -E "$Auto_Release_Title") +if [ "$has_release_notes" == "" ]; then + echo "โŒ It should have *$Auto_Release_Content* in '$Auto_Tag_And_Release_Dir/' directory of your project in HitHub." + exit 1 +fi +if [ "$has_release_title" == "" ]; then + echo "โŒ It should have *$Auto_Release_Title* in '$Auto_Tag_And_Release_Dir/' directory of your project in HitHub." + exit 1 +fi + + +# # # # python-package or github-action-reusable-workflow +Input_Arg_Release_Type=$1 +Input_Arg_Debug_Mode=$2 + +keep_release="$KEEP_RELEASE_IF_PRE_VERSION" + +if [ "$Input_Arg_Release_Type" == "" ]; then + echo "โŒ The argument 'Input_Arg_Release_Type' (first argument) cannot be empty." + exit 1 +fi + +if [ "$Input_Arg_Release_Type" == 'python-package' ]; then + # # # # The name of Python package + Input_Arg_Python_Pkg_Name=$3 + # # # # For development and troubleshooting +# Input_Arg_Debug_Mode=$4 + Input_Arg_Software_Version_Format=$4 +elif [ "$Input_Arg_Release_Type" == 'github-action-reusable-workflow' ]; then + Input_Arg_Python_Pkg_Name="" + Input_Arg_Software_Version_Format="" +# Input_Arg_Debug_Mode=$2 +else + echo "โŒ Currently, it only has 2 release type: 'python-package' or 'github-action-reusable-workflow'." 
+ exit 1 +fi +if [ "$Input_Arg_Debug_Mode" == "" ]; then + Input_Arg_Debug_Mode=true +fi + + +# # # # From the PEP440: Software version style rule +# # # +# # # The version setting 1: version format +# # Simple โ€œmajor.minorโ€ versioning: (general-2) +# 0.1, 0.2, 0.3, 1.0, 1.1 +# # Simple โ€œmajor.minor.microโ€ versioning: (general-3) +# 1.0.0, 1.0.1, 1.0.2, 1.1.0 +# # Date based releases, using an incrementing serial within each year, skipping zero: (date-based) +# 2012.1, 2012.2, ..., 2012.15, 2013.1, 2013.2 +# # # The version setting 2: version evolution +# # โ€œmajor.minorโ€ versioning with alpha, beta and candidate pre-releases: (sema) +# 0.9, 1.0a1, 1.0a2, 1.0b1, 1.0rc1, 1.0 +# # โ€œmajor.minorโ€ versioning with developmental releases, release candidates and post-releases for minor corrections: (dev) +# 0.9, 1.0.dev1, 1.0.dev2, 1.0.dev3, 1.0c1, 1.0, 1.0.post1, 1.1.dev1 +#Input_Arg_Software_Version_Format=$3 + +declare Software_Version_Reg +declare Python_Version_Reg + +declare version_reg +if [ "$Input_Arg_Software_Version_Format" == "general-2" ]; then + version_reg="[0-9]\.[0-9]" +elif [ "$Input_Arg_Software_Version_Format" == "general-3" ]; then + version_reg="[0-9]\.[0-9]\.[0-9]" +elif [ "$Input_Arg_Software_Version_Format" == "date-based" ]; then + version_reg="[0-9]{4}\.([0-9]{1,})+" +else + # Default value + version_reg="[0-9]\.[0-9]\.[0-9]" +fi +Software_Version_Reg="$version_reg*([\.,-]*([a-zA-Z]{1,})*([0-9]{0,})*){0,}" + +if [ "$Input_Arg_Release_Type" == 'python-package' ]; then + if [ "$Input_Arg_Python_Pkg_Name" == "" ]; then + echo "โŒ The argument 'Input_Arg_Python_Pkg_Name' (second argument) cannot be empty if option 'Input_Arg_Release_Type' (first argument) is 'python-package'." 
+ exit 1 + fi + + Python_Version_Reg="__version__ = \"$Software_Version_Reg\"" +fi + +#if [ "$Input_Arg_Release_Type" == 'python-package' ]; then +# if [ "$software_version_evolution" == "sema" ]; then +# echo "*-*([a-zA-Z]{1,})*([0-9]{0,})" +# elif [ "$software_version_evolution" == "dev" ]; then +# echo "*[\.,-]*([a-zA-Z]{1,})*([0-9]{0,})" +# else +# # Default value +# echo "" +# fi +#fi + + +#Current_Branch=$(git branch --show-current) +# # # # For debug +#echo "Verify the git branch info" +#git branch --list | cat +#echo "Verify all the git branch info" +#git branch -a | cat +#echo "Verify the git remote info" +#git remote -v +#echo "Get the current git branch info" + +# This is the global value to provide after-handle to use +Current_Branch=$(git branch --list | cat | grep -E '\* ([a-zA-Z0-9]{1,16})' | grep -E -o '([a-zA-Z0-9]{1,16})') +echo "๐Ÿ”Ž ๐ŸŒณ Current git branch: $Current_Branch" + +git config --global user.name "Chisanan232" +git config --global user.email "chi10211201@cycu.org.tw" +git_global_username=$(git config --global user.name) +git_global_user_email=$(git config --global user.email) +echo "๐Ÿ”Ž ๐ŸŒณ Current git name: $git_global_username" +echo "๐Ÿ”Ž ๐ŸŒณ Current git email: $git_global_user_email" + +git pull +echo "๐Ÿ“ฉ ๐ŸŒณ git pull done" + +declare Tag_Version # This is the return value of function 'get_latest_version_by_git_tag' +get_latest_version_by_git_tag() { + # # # # The types to get version by tag: 'git' or 'github' + get_version_type=$1 + + if [ "$get_version_type" == "git" ]; then + echo "๐Ÿ”Ž ๐ŸŒณ ๐Ÿท Get the version info from git tag." + Tag_Version=$(git describe --tag --abbrev=0 --match "v[0-9]\.[0-9]\.[0-9]*" | grep -E -o '[0-9]\.[0-9]\.[0-9]*') + elif [ "$get_version_type" == "github" ]; then + echo "๐Ÿ”Ž ๐Ÿ™ ๐Ÿˆ ๐Ÿท Get the version info from GitHub release." 
+ github_release=$(curl -s https://api.github.com/repos/Chisanan232/GitHub-Action_Reusable_Workflows-Python/releases/latest | jq -r '.tag_name') + Tag_Version=$(echo "$github_release" | grep -E -o '[0-9]\.[0-9]\.[0-9]*') + else + echo "โŒ Currently, it only has 2 valid options could use: 'git' or 'github'." + exit 1 + fi +} + + +declare New_Release_Version # This is the return value of function 'generate_new_version_as_tag' +declare New_Release_Tag # This is the return value of function 'generate_new_version_as_tag' +generate_new_version_as_tag() { + project_type=$1 + if [ "$project_type" == "python" ]; then + echo "๐Ÿ”Ž ๐Ÿ ๐Ÿ“ฆ Get the new version info from Python package." + New_Release_Version=$(cat ./"$Input_Arg_Python_Pkg_Name"/__pkg_info__.py | grep -E "$Python_Version_Reg" | grep -E -o "$Software_Version_Reg") + New_Release_Tag=$New_Release_Version + elif [ "$project_type" == "github-action_reusable-workflow" ]; then + echo "๐Ÿ”Ž ๐Ÿ™ ๐Ÿˆ ๐Ÿท Get the current version info from GitHub release." + # Generate the new version from previous tag + get_latest_version_by_git_tag 'github' + current_ver=$(echo "$Tag_Version" | head -n1 | cut -d "." -f1) + echo "๐Ÿ”Ž ๐Ÿ“ƒ Current Version: $current_ver" + +# current_ver=$(git describe --tag --abbrev=0 --match "v[0-9]\.[0-9]\.[0-9]" | grep -E -o '[0-9]\.[0-9]\.[0-9]' | head -n1 | cut -d "." 
-f1) + # NOTE: It it has value, the version is a semi-version number like '6.1.0' + # shellcheck disable=SC2002 + New_Release_Version=$(cat "$Auto_Tag_And_Release_Dir/$Auto_Release_Title" | grep -E -o "$Software_Version_Reg") + New_Release_Tag='v'$New_Release_Version + if [ "$New_Release_Version" == "" ]; then + # NOTE: The version is a pure number like '6' + if [ "$current_ver" == "" ]; then + current_ver=0 + fi + New_Release_Version=$(( current_ver + 1 )) + New_Release_Tag='v'$New_Release_Version'.0.0' + fi + fi +} + + +build_git_tag_or_github_release() { + # git event: push + # all branch -> Build tag + # master branch -> Build tag and create release + project_type=$1 + generate_new_version_as_tag "$project_type" + build_git_tag + build_github_release +} + + +build_git_tag() { + # git event: push + # all branch -> Build tag + # master branch -> Build tag and create release + ensure_release_tag_is_not_empty + + if [ "$Input_Arg_Debug_Mode" == true ]; then + echo " ๐Ÿ”๐Ÿ‘€ [DEBUG MODE] Build git tag $New_Release_Tag in git branch '$Current_Branch'." + else + git tag -a "$New_Release_Tag" -m "$New_Release_Tag" + git push -u origin --tags + fi + echo "๐ŸŽ‰ ๐Ÿป ๐ŸŒณ ๐Ÿท Build git tag which named '$New_Release_Tag' with current branch '$Current_Branch' successfully!" +} + + +build_github_release() { + # git event: push + # all branch -> Build tag + # master branch -> Build tag and create release + ensure_release_tag_is_not_empty + + if [ "$Current_Branch" == "master" ]; then + release_title=$(cat "$Auto_Tag_And_Release_Dir/$Auto_Release_Title") + + if [ "$Input_Arg_Debug_Mode" == true ]; then + echo " ๐Ÿ”๐Ÿ‘€ [DEBUG MODE] Create GitHub release with tag '$New_Release_Tag' and title '$release_title' in git branch '$Current_Branch'." 
+ else + gh release create "$New_Release_Tag" --title "$release_title" --notes-file "$Auto_Tag_And_Release_Dir/$Auto_Release_Content" + fi + fi + echo "๐ŸŽ‰ ๐Ÿป ๐Ÿ™ ๐Ÿˆ ๐Ÿท Create GitHub release with title '$release_title' successfully!" +} + + +ensure_release_tag_is_not_empty() { + if [ "$New_Release_Tag" == "" ]; then + echo "โŒ The new release tag it got is empty. Please check version info in your repository." + exit 1 + else + echo "โœ… It gets new version info and it's *$New_Release_Tag*. It would keep running to set it." + fi +} + + +tag_and_release_python_project() { + git_tag=$(git describe --tag --abbrev=0 --match "v[0-9]\.[0-9]\.[0-9]*" | grep -o '[0-9]\.[0-9]\.[0-9]*') + github_release=$(curl -s https://api.github.com/repos/Chisanan232/GitHub-Action_Reusable_Workflows-Python/releases/latest | jq -r '.tag_name') + # shellcheck disable=SC2002 + generate_new_version_as_tag "python" + + build_git_tag=false + create_github_release=false + + # 1. Compare the Python source code version and git tag, GitHub release version. + if [ "$New_Release_Version" == "$git_tag" ]; then + echo "โœ… Version of git tag info are the same. So it verifies it has built and pushed before." + else + echo "โš ๏ธ Version of git tag info are different. So it verifies it doesn't build and push before." + build_git_tag=true + fi + + if [ "$Current_Branch" == "master" ] && [ "$New_Release_Version" == "$github_release" ]; then + echo "โœ… Version of GitHub release info are the same. So it verifies it has built and pushed before." + else + echo "โš ๏ธ Version of GitHub release info are different. So it verifies it doesn't build and push before." + create_github_release=true + fi + + # 1. -> Same -> 1-1. Does it have built and pushed before?. + # 1. -> No (In generally, it should no) -> 1-2. Is it a pre-release version in source code? + + # 1-1. Yes, it has built and pushed. -> Doesn't do anything. + # 1-1. No, it doesn't build and push before. -> Build and push directly. + + # 1-2. 
Yes, it's pre-release. -> Doesn't build and push. Just build git tag and GitHub release. + # 1-2. No, it's not pre-release. -> It means that it's official version, e.g., 1.3.2 version. So it should build git tag and GitHub release first, and build and push. + + if [ "$build_git_tag" == true ] || [ "$create_github_release" == true ]; then + + echo "๐Ÿ”Ž ๐Ÿ ๐Ÿ“ฆ Python package new release version: $New_Release_Version" + is_pre_release_version=$(echo $New_Release_Version | grep -E -o '([\.-]*([a-zA-Z]{1,})+([0-9]{0,})*){1,}') + echo "๐Ÿ”Ž ๐Ÿคฐ ๐Ÿ“ฆ is pre-release version: $is_pre_release_version" + if [ "$is_pre_release_version" == "" ] || [ "$keep_release" == "TRUE" ]; then + echo "๐ŸŽ“ ๐Ÿ ๐Ÿ“ฆ The version is a official-release." + # do different things with different ranches + # git event: push + # all branch -> Build tag + # master branch -> Build tag and create release + echo "๐Ÿ‘ท๐Ÿฝโ€โ™‚๏ธ ๐Ÿ“Œ Build tag and create GitHub release, also push code to PyPi" + build_git_tag_or_github_release "python" + echo "โœ… ๐ŸŽŠ ๐Ÿฅ‚ Done! This is Official-Release so please push source code to PyPi." + echo "[Python] [Final Running Result] Official-Release" + else + echo "The version is a pre-release." + # do different things with different ranches + # git event: push + # all branch -> Build tag + # master branch -> Build tag and create release + echo "๐Ÿ‘ท๐Ÿฝโ€โ™‚ ๏ธ๐Ÿ“Œ Build tag and create GitHub release only" + build_git_tag_or_github_release "python" + echo "โœ… ๐ŸŽŠ ๐Ÿฅ‚ Done! This is Pre-Release so please don't push this to PyPi." + echo "[Python] [Final Running Result] Pre-Release" + fi + + fi +} + + +tag_and_release_reusable_github_action_workflows_project() { + # # # # For GitHub Action reusable workflow template release + # 1. Compare whether the release-notes.md has different or not. 
+ # Note 1: Diff a specific file with currently latest tag and previous one commit + # https://stackoverflow.com/questions/3338126/how-do-i-diff-the-same-file-between-two-different-commits-on-the-same-branch + # Note 2: Show the output result in stdout directly + # https://stackoverflow.com/questions/17077973/how-to-make-git-diff-write-to-stdout + # Note 3: Here code should be considered what git tag on master branch so we need to verify the info on master branch. + # Note 4: We should git fetch to provide git diff feature working + # https://github.com/actions/checkout/issues/160 + + echo "๐ŸŒณ โ›“ ๐ŸŒณ Run git fetch to sync upstream with latest project in GitHub" + git fetch --no-tags --prune --depth=1 origin +refs/heads/*:refs/remotes/origin/* + + echo "๐Ÿ”Ž ๐ŸŒณ ๐ŸŒณ Verify all the git branch info again after git fetch." + git branch -a | cat + + echo "๐Ÿ”Ž ๐Ÿ”— ๐ŸŒณ Verify the git remote info again after git fetch." + git remote -v + + echo "๐Ÿ”ฌ ๐Ÿ“„ ๐ŸŒณ โ›“ ๐ŸŒณ Check the different of '$Auto_Tag_And_Release_Dir/$Auto_Release_Content' between current git branch and master branch ..." + # # v1: compare by git branches +# release_notes_has_diff=$(git diff origin/master "$Current_Branch" -- "$Auto_Tag_And_Release_Dir/$Auto_Release_Content" | cat) + # # v2: compare by git tag + all_git_tags=$(git tag -l | cat) + declare -a all_git_tags_array=( $(echo "$all_git_tags" | awk -v RS='' '{gsub("\n"," "); print}') ) + all_git_tags_array_len=${#all_git_tags_array[@]} + latest_git_tag=${all_git_tags_array[$all_git_tags_array_len - 1]} + echo "๐Ÿ”Ž ๐ŸŒณ ๐Ÿท The latest git tag: $latest_git_tag" + + release_notes_has_diff=$(git diff "$latest_git_tag" "$Current_Branch" -- "$Auto_Tag_And_Release_Dir/$Auto_Release_Content" | cat) + echo "๐Ÿ”Ž ๐Ÿ”ฌ ๐Ÿ“„ different of '$Auto_Tag_And_Release_Dir/$Auto_Release_Content': $release_notes_has_diff" + + if [ "$release_notes_has_diff" != "" ]; then + # 1. Yes, it has different. 
-> Build git tag, GitHub release and version branch + build_git_tag_or_github_release "github-action_reusable-workflow" + echo "โœ… ๐ŸŽŠ ๐Ÿฅ‚ Done! This is Official-Release of GitHub Action reusable workflow, please create a version branch of it." + echo "[GitHub Action - Reusable workflow] [Final Running Result] Official-Release and version: $New_Release_Version" + else + # 1. No, do nothing. + # Return nothing output + echo "๐Ÿ’ค Release note file doesn't change. Don't do anything." + echo "[GitHub Action - Reusable workflow] [Final Running Result] Pre-Release" + fi +} + + +# The truly running implementation of shell script +if [ "$Input_Arg_Release_Type" == 'python-package' ]; then + # # # # For Python package release + echo "๐Ÿƒโ€โ™‚ ๏ธ๐Ÿ ๐Œš Run python package releasing process" + tag_and_release_python_project +elif [ "$Input_Arg_Release_Type" == 'github-action-reusable-workflow' ]; then + echo "๐Ÿƒโ€โ™‚ ๐Ÿ™ ๐Ÿˆ ๐Œš Run github-action-reusable-workflow releasing process" + tag_and_release_reusable_github_action_workflows_project +fi diff --git a/nested_poetry_project/scripts/ci/check-input-params.sh b/nested_poetry_project/scripts/ci/check-input-params.sh new file mode 100644 index 00000000..15dcce75 --- /dev/null +++ b/nested_poetry_project/scripts/ci/check-input-params.sh @@ -0,0 +1,19 @@ +#!/usr/bin/env bash + +#set -ex +upload_report_to_platform_flag=$1 +platform_token=$2 + +echo "๐Ÿ” Start to check input parameters ..." +if [ "$upload_report_to_platform_flag" = true ]; then + echo "โœ… This using flag of uploading platform is true." + if [ "$platform_token" = "" ]; then + echo "โš ๏ธ๏ธ The using flag of uploading to platform is true but it has no Token of it." + echo "โŒ It needs a Token to let CI could use it authenticates and uploads report to the platform. Please configure a Token to it." + exit 1 + else + echo "๐Ÿป It has a Token!" + fi +else + echo "๐Ÿ’ค It doesn't upload report to this platform." 
+fi diff --git a/nested_poetry_project/scripts/ci/check_getting_output.sh b/nested_poetry_project/scripts/ci/check_getting_output.sh new file mode 100644 index 00000000..cd5ff6b8 --- /dev/null +++ b/nested_poetry_project/scripts/ci/check_getting_output.sh @@ -0,0 +1,12 @@ +#!/usr/bin/env bash + +#set -ex + +release_version=$(echo $RELEASE_TYPE) +if [ "$release_version" != "" ]; then + echo "๐Ÿ“ฌ๐ŸŽ‰๐Ÿป It gets data which is release version info!" + exit 0 +else + echo "๐Ÿ“ญ๐Ÿ™ˆ It doesn't get any data which is release version info." + exit 1 +fi diff --git a/nested_poetry_project/scripts/ci/combine_coverage_reports.sh b/nested_poetry_project/scripts/ci/combine_coverage_reports.sh new file mode 100644 index 00000000..e11abef2 --- /dev/null +++ b/nested_poetry_project/scripts/ci/combine_coverage_reports.sh @@ -0,0 +1,20 @@ +#!/usr/bin/env bash + +set -ex + +test_type=$1 +test_coverage_report_format=$2 + +coveragedatafile=".coverage.$test_type" + +if [ "$test_type" == "all-test" ]; +then + coverage combine --data-file="$coveragedatafile" "$test_coverage_report_format"* +else + coverage combine --data-file="$coveragedatafile" "$test_coverage_report_format$test_type"* +fi + +coverage report -m --data-file="$coveragedatafile" +coverage xml --data-file="$coveragedatafile" -o coverage_"$test_type".xml +cp "$coveragedatafile" .coverage +echo "โœ… All processing done." && exit 0 diff --git a/nested_poetry_project/scripts/ci/deployment_new_version_workflow.sh b/nested_poetry_project/scripts/ci/deployment_new_version_workflow.sh new file mode 100644 index 00000000..5d91f92e --- /dev/null +++ b/nested_poetry_project/scripts/ci/deployment_new_version_workflow.sh @@ -0,0 +1,24 @@ +#!/usr/bin/env bash + +debug_mode=$1 + +final_release_type=$RELEASE_TYPE +if [ "$final_release_type" == "Pre" ]; then + echo "๐Ÿ’ค It detects Pre-Release flag. So it does NOT do anything in deployment process." +else + echo "๐Ÿ“ฌ It detects Official-Release flag." 
+ if [ "$debug_mode" == true ]; then + echo " ๐Ÿ”๐Ÿ‘€[DEBUG MODE] Create new git branch for the new version $final_release_type." + else + git remote add github-action_workflow-template https://github.com/Chisanan232/GitHub-Action-Template-Python.git + echo "๐Ÿ”—๐Ÿ“„ Add git remote reference." + git remote -v + echo "๐Ÿ” Check all git remote reference." + git checkout -b "v$final_release_type" + echo "โ›“ Create a new git branch as version." + git push -u github-action_workflow-template "v$final_release_type" + echo "๐Ÿป๐ŸŽ‰ Push the source code as a branch with one specific version to the GitHub." + fi +fi + +echo "๐ŸŽŠ๐Ÿฅ‚ Done!" diff --git a/nested_poetry_project/scripts/ci/generate_release_info.sh b/nested_poetry_project/scripts/ci/generate_release_info.sh new file mode 100644 index 00000000..e8c38cf5 --- /dev/null +++ b/nested_poetry_project/scripts/ci/generate_release_info.sh @@ -0,0 +1,19 @@ +#!/usr/bin/env bash + +Input_Arg_Project_Type=$1 +Input_Arg_Debug_Mode=$2 + +release=$(bash ./scripts/ci/build_git-tag_or_create_github-release.sh "$Input_Arg_Project_Type" "$Input_Arg_Debug_Mode") +echo "๐Ÿ“„ Release log: $release" + +release_version=$(echo "$release" | grep -E "\[GitHub Action - Reusable workflow\] \[Final Running Result\] (Official\-Release and version: ([0-9]{1,}\.[0-9]{1,})|(Pre\-Release))" | grep -E -o "(([0-9]{1,}\.[0-9]{1,})|(Pre\-Release))") +is_first_release_version=$(echo "$release_version" | grep -E -o "([0-9]{1,}\.0\.{0,1}0{0,1})") +if [ "$is_first_release_version" != "" ]; then + # shellcheck disable=SC2125 + # shellcheck disable=SC2207 + # shellcheck disable=SC2034 + release_version_array=($(echo "$release_version" | grep -E -o "([0-9]{1,})")) + # shellcheck disable=SC2125 + release_version="${release_version_array[0]}" +fi +echo " ๐Ÿ“ฒ Target version which would be pass to deployment process: $release_version" diff --git a/nested_poetry_project/scripts/ci/get-all-tests.sh b/nested_poetry_project/scripts/ci/get-all-tests.sh new file 
mode 100644 index 00000000..a645d5ed --- /dev/null +++ b/nested_poetry_project/scripts/ci/get-all-tests.sh @@ -0,0 +1,95 @@ +#!/usr/bin/env bash + +set -ex + +base_directory=$1 +if [ "$base_directory" == "" ]; +then + base_directory="test/" +fi + +runtime_os=$2 +if [ "$runtime_os" == "" ]; +then + runtime_os="unix" +fi + +declare -a all_test_subpkgs=( "$base_directory" ) + +get_all_test_subpackage() { + # Get all test directories (python subpackage) + # Note: use dept-first search algorithm + declare index=0 + if [ "$1" ]; + then + index=$1 + fi + + declare test_subpkg="${all_test_subpkgs[$index]}" + if [ "$test_subpkg" != "" ]; + then + # Still has test subpackage won't scan + declare test_path="$test_subpkg*/" + # shellcheck disable=SC2086 + declare -a test_subpkg_array=( $(ls -d $test_path | grep -v '__pycache__') ) + + if [ ${#test_subpkg_array[@]} != 0 ]; + then + # No any directory under this path, try to get the test modules + all_test_subpkgs+=( "${test_subpkg_array[@]}" ) + # shellcheck disable=SC2004 + get_all_test_subpackage $(( $index + 1 )) + else + # Has some directories under this path, keep searching + if [ ${#all_test_subpkgs[@]} != "$index" ]; + then + # shellcheck disable=SC2004 + get_all_test_subpackage $(( $index + 1 )) + fi + fi + fi +} + +declare all_tests + +get_all_test_modules_under_subpkg() { + # Get all test modules with one specific subpackage (directory has __init__.py file) + declare -a testpatharray=( $(ls -F "$1" | grep -v '/$' | grep -v '__init__.py' | grep -v 'test_config.py' | grep -v -E '^_[a-z_]{1,64}.py' | grep -v '__pycache__')) + + declare -a alltestpaths + for test_module_path in "${testpatharray[@]}"; + do + alltestpaths+=("$1$test_module_path") + done + + # shellcheck disable=SC2124 + # shellcheck disable=SC2178 + all_tests+="${alltestpaths[@]} " +} + +get_all_test_modules() { + # Get all test modules under these test subpackages + for test_subpkg in "${all_test_subpkgs[@]}"; + do + get_all_test_modules_under_subpkg 
"$test_subpkg" + done +} + +# Get all test module paths +get_all_test_subpackage +get_all_test_modules + +# Process data as list type value +dest=( "${all_tests[@]}" ) + + +# Output the final result about all test modules +if echo "$runtime_os" | grep -q "windows"; +then + printf "${dest[@]}" | jq -R . +elif echo "$runtime_os" | grep -q "unix"; +then + printf '%s\n' "${dest[@]}" | jq -R . | jq -cs . +else + printf 'error' | jq -R . +fi diff --git a/nested_poetry_project/scripts/ci/test/get-integration-test-paths.sh b/nested_poetry_project/scripts/ci/test/get-integration-test-paths.sh new file mode 100644 index 00000000..45ab8e9c --- /dev/null +++ b/nested_poetry_project/scripts/ci/test/get-integration-test-paths.sh @@ -0,0 +1,34 @@ +#!/usr/bin/env bash + +set -ex +runtime_os=$1 + +declare -a base_tests + +getalltests() { + declare -a testpatharray=( $(ls -F "$1" | grep -v '/$' | grep -v '__init__.py' | grep -v 'test_config.py' | grep -v -E '^_[a-z_]{1,64}.py' | grep -v '__pycache__')) + + declare -a alltestpaths + for (( i = 0; i < ${#testpatharray[@]}; i++ )) ; do + alltestpaths[$i]=$1${testpatharray[$i]} + done + + base_tests=("${alltestpaths[@]}") +} + +base_path=test/integration_test/ + +getalltests $base_path + +dest=( "${base_tests[@]}" ) + + +if echo "$runtime_os" | grep -q "windows"; +then + printf '%s\n' "${dest[@]}" | jq -R . +elif echo "$runtime_os" | grep -q "unix"; +then + printf '%s\n' "${dest[@]}" | jq -R . | jq -cs . +else + printf 'error' | jq -R . 
+fi diff --git a/nested_poetry_project/scripts/ci/test/test_pgk_install.py b/nested_poetry_project/scripts/ci/test/test_pgk_install.py new file mode 100644 index 00000000..3c2a4ca1 --- /dev/null +++ b/nested_poetry_project/scripts/ci/test/test_pgk_install.py @@ -0,0 +1,3 @@ +from test_gh_workflow import sample + +sample.hello_python() diff --git a/nested_poetry_project/scripts/run_pytest_in_develop.sh b/nested_poetry_project/scripts/run_pytest_in_develop.sh new file mode 100644 index 00000000..0c21f47b --- /dev/null +++ b/nested_poetry_project/scripts/run_pytest_in_develop.sh @@ -0,0 +1,47 @@ +#!/usr/bin/env bash + +########################################################################################## +# +# Target: +# For develop to be more easier to run testing via *pytest*. +# +# Description: +# It does 2 things: run script for getting testing items and run the testing via tool *pytest*. +# This bash file must to receive a argument *testing_type* which is the key condition to let +# script runs unit test or integration test. +# +# Allowable argument: +# * unit-test: Get and run unit test. +# * integration-test: Get and run integration test. +# +########################################################################################## + +set -exm +testing_type=$1 +echo "โš™๏ธ It would run the " + testing_type + " of the Python package SmoothCrawler-Cluster." + +echo "๐Ÿ” Get the testing items ... โณ" + +if echo "$testing_type" | grep -q "unit-test"; +then + test_path=$(bash ./scripts/ci/get-unit-test-paths.sh windows | sed "s/\"//g" | sed 's/^//') +elif echo "$testing_type" | grep -q "integration-test"; +then + test_path=$(bash ./scripts/ci/get-integration-test-paths.sh windows | sed "s/\"//g" | sed 's/^//') +else + test_path='error' +fi + +if echo $test_path | grep -q "error"; +then + echo "โŒ Got error when it tried to get testing items... ๐Ÿ˜ฑ" + exit 1 +else + echo "๐ŸŽ‰๐ŸŽŠ๐Ÿพ Get the testing items successfully!" 
+ echo "๐Ÿ“„ The testing items are: " + echo $test_path + + echo "๐Ÿค–โš’ It would start to run testing with Python testing framework *pytest*." + pytest $test_path +fi + diff --git a/nested_poetry_project/test/__init__.py b/nested_poetry_project/test/__init__.py new file mode 100644 index 00000000..9730af16 --- /dev/null +++ b/nested_poetry_project/test/__init__.py @@ -0,0 +1 @@ +# # This test directory for testing to simulate a Python library project structure. diff --git a/nested_poetry_project/test/_http_server/app.py b/nested_poetry_project/test/_http_server/app.py new file mode 100644 index 00000000..896856ef --- /dev/null +++ b/nested_poetry_project/test/_http_server/app.py @@ -0,0 +1,61 @@ +""" +A simple HTTP server for testing. It would return a JSON type data. +""" + +from flask import Flask, request + + +app: Flask = Flask(__name__) + + +@app.route("/exchangeReport/STOCK_DAY", methods=["GET"]) +def get_stock_data() -> str: + """ + API: /exchangeReport/STOCK_DAY + API Parameters: + * response: The data format. Default is JSON type. + * date: The date of data. + * stockNo: The stock symbol no. + + Example: + http://10.20.23.3:12345/exchangeReport/STOCK_DAY?response=json&date=20170101&stockNo=2330 + + :return: A string type data with JSON type format. 
+ """ + + _response = request.args.get("response", "json") + _date = request.args.get("date", None) # Example: 20170101 + _stockNo = request.args.get("stockNo", None) # Example: 2330 + + _data = '{' \ + '"stat":"OK",' \ + f'"date":"{_date}",' \ + f'"title":"111ๅนด06ๆœˆ {_stockNo} ๅฐ็ฉ้›ป ๅ„ๆ—ฅๆˆไบค่ณ‡่จŠ",' \ + '"fields":["ๆ—ฅๆœŸ","ๆˆไบค่‚กๆ•ธ","ๆˆไบค้‡‘้ก","้–‹็›คๅƒน","ๆœ€้ซ˜ๅƒน","ๆœ€ไฝŽๅƒน","ๆ”ถ็›คๅƒน","ๆผฒ่ทŒๅƒนๅทฎ","ๆˆไบค็ญ†ๆ•ธ"],' \ + '"data":[' \ + '["111/06/01","32,970,903","18,171,598,472","550.00","555.00","548.00","549.00","-11.00","33,456"],' \ + '["111/06/02","26,063,495","14,122,936,388","544.00","545.00","540.00","540.00","-9.00","30,042"],' \ + '["111/06/06","23,732,327","12,843,324,209","541.00","544.00","538.00","540.00"," 0.00","16,614"],' \ + '["111/06/07","22,152,512","11,846,386,906","535.00","538.00","532.00","535.00","-5.00","28,586"],' \ + '["111/06/08","19,609,522","10,636,701,303","539.00","545.00","538.00","544.00","+9.00","18,487"],' \ + '["111/06/09","16,894,479","9,115,934,006","538.00","542.00","537.00","541.00","-3.00","18,802"],' \ + '["111/06/10","22,614,596","12,011,615,014","530.00","533.00","529.00","530.00","-11.00","44,802"],' \ + '["111/06/13","36,758,925","18,998,155,460","518.00","519.00","515.00","516.00","-14.00","112,023"],' \ + '["111/06/14","38,838,778","19,813,036,892","507.00","514.00","507.00","513.00","-3.00","85,483"],' \ + '["111/06/15","38,360,508","19,580,150,319","508.00","515.00","508.00","509.00","-4.00","72,687"],' \ + '["111/06/16","31,908,028","16,331,470,764","515.00","516.00","507.00","508.00","X0.00","42,177"],' \ + '["111/06/17","48,400,798","24,260,277,915","499.50","503.00","499.00","501.00","-7.00","119,618"],' \ + '["111/06/20","36,664,463","18,267,359,790","500.00","502.00","495.00","498.00","-3.00","89,541"],' \ + '["111/06/21","34,432,537","17,298,234,720","501.00","505.00","499.00","505.00","+7.00","32,427"],' \ + 
'["111/06/22","33,438,921","16,630,857,096","501.00","503.00","494.50","494.50","-10.50","81,024"],' \ + '["111/06/23","46,808,462","22,836,692,325","492.00","493.50","485.00","485.50","-9.00","104,661"],' \ + '["111/06/24","29,003,676","14,184,287,155","489.50","492.50","485.50","486.50","+1.00","43,609"],' \ + '["111/06/27","38,684,368","19,379,396,938","496.00","506.00","495.50","498.50","+12.00","37,438"],' \ + '["111/06/28","16,867,955","8,392,290,378","496.00","500.00","496.00","497.50","-1.00","18,988"],' \ + '["111/06/29","33,124,986","16,352,376,816","496.00","498.50","491.00","491.00","-6.50","40,024"],' \ + '["111/06/30","49,820,824","23,900,613,642","484.50","486.50","476.00","476.00","-15.00","111,117"]' \ + '],' \ + '"notes":["็ฌฆ่™Ÿ่ชชๆ˜Ž:+/-/X่กจ็คบๆผฒ/่ทŒ/ไธๆฏ”ๅƒน","็•ถๆ—ฅ็ตฑ่จˆ่ณ‡่จŠๅซไธ€่ˆฌใ€้›ถ่‚กใ€็›คๅพŒๅฎšๅƒนใ€้‰…้กไบคๆ˜“๏ผŒไธๅซๆ‹่ณฃใ€ๆจ™่ณผใ€‚","ETF่ญ‰ๅˆธไปฃ่™Ÿ็ฌฌๅ…ญ็ขผ็‚บKใ€Mใ€Sใ€C่€…๏ผŒ่กจ็คบ่ฉฒETFไปฅๅค–ๅนฃไบคๆ˜“ใ€‚"]' \ + '}' + return _data + diff --git a/nested_poetry_project/test/_http_server/test_data.json b/nested_poetry_project/test/_http_server/test_data.json new file mode 100644 index 00000000..1836797a --- /dev/null +++ b/nested_poetry_project/test/_http_server/test_data.json @@ -0,0 +1,24 @@ +{ + "stat":"OK", + "date":"20220820", + "title":"111ๅนด08ๆœˆ 2330 ๅฐ็ฉ้›ป ๅ„ๆ—ฅๆˆไบค่ณ‡่จŠ", + "fields":["ๆ—ฅๆœŸ","ๆˆไบค่‚กๆ•ธ","ๆˆไบค้‡‘้ก","้–‹็›คๅƒน","ๆœ€้ซ˜ๅƒน","ๆœ€ไฝŽๅƒน","ๆ”ถ็›คๅƒน","ๆผฒ่ทŒๅƒนๅทฎ","ๆˆไบค็ญ†ๆ•ธ"], + "data":[ + ["111/08/01","24,991,291","12,569,771,761","506.00","508.00","500.00","504.00","-5.00","26,792"], + ["111/08/02","42,669,591","20,973,293,337","494.00","496.00","488.50","492.00","-12.00","63,879"], + ["111/08/03","29,838,832","14,823,224,632","494.00","501.00","493.00","501.00","+9.00","25,570"], + ["111/08/04","26,589,086","13,279,624,282","499.00","503.00","495.00","500.00","-1.00","27,173"], + 
["111/08/05","35,052,642","17,966,410,242","509.00","516.00","507.00","516.00","+16.00","49,928"], + ["111/08/08","20,568,971","10,531,710,250","510.00","515.00","509.00","512.00","-4.00","18,131"], + ["111/08/09","24,370,709","12,372,442,661","507.00","511.00","504.00","510.00","-2.00","25,433"], + ["111/08/10","22,112,239","11,075,581,424","500.00","503.00","499.50","500.00","-10.00","35,188"], + ["111/08/11","24,906,177","12,771,121,611","513.00","514.00","510.00","514.00","+14.00","23,949"], + ["111/08/12","21,343,450","11,016,097,043","515.00","518.00","514.00","517.00","+3.00","21,701"], + ["111/08/15","22,519,886","11,755,494,600","520.00","524.00","519.00","523.00","+6.00","27,372"], + ["111/08/16","21,234,122","11,141,160,337","526.00","526.00","523.00","525.00","+2.00","20,628"], + ["111/08/17","28,461,939","14,943,047,011","524.00","527.00","521.00","527.00","+2.00","26,466"], + ["111/08/18","18,721,898","9,734,756,997","520.00","521.00","519.00","520.00","-7.00","24,209"], + ["111/08/19","14,235,983","7,403,584,002","519.00","523.00","517.00","519.00","-1.00","14,069"] + ], + "notes":["็ฌฆ่™Ÿ่ชชๆ˜Ž:+/-/X่กจ็คบๆผฒ/่ทŒ/ไธๆฏ”ๅƒน","็•ถๆ—ฅ็ตฑ่จˆ่ณ‡่จŠๅซไธ€่ˆฌใ€้›ถ่‚กใ€็›คๅพŒๅฎšๅƒนใ€้‰…้กไบคๆ˜“๏ผŒไธๅซๆ‹่ณฃใ€ๆจ™่ณผใ€‚","ETF่ญ‰ๅˆธไปฃ่™Ÿ็ฌฌๅ…ญ็ขผ็‚บKใ€Mใ€Sใ€C่€…๏ผŒ่กจ็คบ่ฉฒETFไปฅๅค–ๅนฃไบคๆ˜“ใ€‚"] +} \ No newline at end of file diff --git a/nested_poetry_project/test/integration_test/__init__.py b/nested_poetry_project/test/integration_test/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/nested_poetry_project/test/integration_test/sample.py b/nested_poetry_project/test/integration_test/sample.py new file mode 100644 index 00000000..434775c9 --- /dev/null +++ b/nested_poetry_project/test/integration_test/sample.py @@ -0,0 +1,15 @@ +import nested_python_src.sample +import logging +import pytest + + +@pytest.fixture(scope="function") +def get_hello_python() -> str: + return nested_python_src.sample.hello_python() + + +def 
test_sample(get_hello_python: str) -> None: + logging.info("Start Integration test.") + assert get_hello_python == "Hello Python", "The return value should be 'Hello Python'." + logging.info("This is Integration test done.") + diff --git a/nested_poetry_project/test/unit_test/__init__.py b/nested_poetry_project/test/unit_test/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/nested_poetry_project/test/unit_test/sample.py b/nested_poetry_project/test/unit_test/sample.py new file mode 100644 index 00000000..a4e8d5f8 --- /dev/null +++ b/nested_poetry_project/test/unit_test/sample.py @@ -0,0 +1,15 @@ +import nested_python_src.sample +import logging +import pytest + + +@pytest.fixture(scope="function") +def get_hello_python() -> str: + return nested_python_src.sample.hello_python() + + +def test_sample(get_hello_python: str) -> None: + logging.info("Start Unit test.") + assert get_hello_python == "Hello Python", "The return value should be 'Hello Python'." + logging.info("This is Unit test done.") + diff --git a/nested_poetry_project/test/unit_test/subpkg_1/__init__.py b/nested_poetry_project/test/unit_test/subpkg_1/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/nested_poetry_project/test/unit_test/subpkg_1/sample_one_layer.py b/nested_poetry_project/test/unit_test/subpkg_1/sample_one_layer.py new file mode 100644 index 00000000..a4e8d5f8 --- /dev/null +++ b/nested_poetry_project/test/unit_test/subpkg_1/sample_one_layer.py @@ -0,0 +1,15 @@ +import nested_python_src.sample +import logging +import pytest + + +@pytest.fixture(scope="function") +def get_hello_python() -> str: + return nested_python_src.sample.hello_python() + + +def test_sample(get_hello_python: str) -> None: + logging.info("Start Unit test.") + assert get_hello_python == "Hello Python", "The return value should be 'Hello Python'." 
+ logging.info("This is Unit test done.") + diff --git a/nested_poetry_project/test/unit_test/subpkg_1/sample_one_layer_2.py b/nested_poetry_project/test/unit_test/subpkg_1/sample_one_layer_2.py new file mode 100644 index 00000000..a4e8d5f8 --- /dev/null +++ b/nested_poetry_project/test/unit_test/subpkg_1/sample_one_layer_2.py @@ -0,0 +1,15 @@ +import nested_python_src.sample +import logging +import pytest + + +@pytest.fixture(scope="function") +def get_hello_python() -> str: + return nested_python_src.sample.hello_python() + + +def test_sample(get_hello_python: str) -> None: + logging.info("Start Unit test.") + assert get_hello_python == "Hello Python", "The return value should be 'Hello Python'." + logging.info("This is Unit test done.") + diff --git a/nested_poetry_project/test/unit_test/subpkg_2/__init__.py b/nested_poetry_project/test/unit_test/subpkg_2/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/nested_poetry_project/test/unit_test/subpkg_2/sample_2_layer.py b/nested_poetry_project/test/unit_test/subpkg_2/sample_2_layer.py new file mode 100644 index 00000000..a4e8d5f8 --- /dev/null +++ b/nested_poetry_project/test/unit_test/subpkg_2/sample_2_layer.py @@ -0,0 +1,15 @@ +import nested_python_src.sample +import logging +import pytest + + +@pytest.fixture(scope="function") +def get_hello_python() -> str: + return nested_python_src.sample.hello_python() + + +def test_sample(get_hello_python: str) -> None: + logging.info("Start Unit test.") + assert get_hello_python == "Hello Python", "The return value should be 'Hello Python'." 
+ logging.info("This is Unit test done.") + diff --git a/nested_poetry_project/test/unit_test/subpkg_2/sample_2_layer_2.py b/nested_poetry_project/test/unit_test/subpkg_2/sample_2_layer_2.py new file mode 100644 index 00000000..a4e8d5f8 --- /dev/null +++ b/nested_poetry_project/test/unit_test/subpkg_2/sample_2_layer_2.py @@ -0,0 +1,15 @@ +import nested_python_src.sample +import logging +import pytest + + +@pytest.fixture(scope="function") +def get_hello_python() -> str: + return nested_python_src.sample.hello_python() + + +def test_sample(get_hello_python: str) -> None: + logging.info("Start Unit test.") + assert get_hello_python == "Hello Python", "The return value should be 'Hello Python'." + logging.info("This is Unit test done.") + diff --git a/nested_poetry_project/test/unit_test/subpkg_2/sample_2_layer_3.py b/nested_poetry_project/test/unit_test/subpkg_2/sample_2_layer_3.py new file mode 100644 index 00000000..a4e8d5f8 --- /dev/null +++ b/nested_poetry_project/test/unit_test/subpkg_2/sample_2_layer_3.py @@ -0,0 +1,15 @@ +import nested_python_src.sample +import logging +import pytest + + +@pytest.fixture(scope="function") +def get_hello_python() -> str: + return nested_python_src.sample.hello_python() + + +def test_sample(get_hello_python: str) -> None: + logging.info("Start Unit test.") + assert get_hello_python == "Hello Python", "The return value should be 'Hello Python'." 
+ logging.info("This is Unit test done.") + diff --git a/nested_poetry_project/test/unit_test/subpkg_2/subpkg_2-1/__init__.py b/nested_poetry_project/test/unit_test/subpkg_2/subpkg_2-1/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/nested_poetry_project/test/unit_test/subpkg_2/subpkg_2-1/sample_2-1_layer.py b/nested_poetry_project/test/unit_test/subpkg_2/subpkg_2-1/sample_2-1_layer.py new file mode 100644 index 00000000..a4e8d5f8 --- /dev/null +++ b/nested_poetry_project/test/unit_test/subpkg_2/subpkg_2-1/sample_2-1_layer.py @@ -0,0 +1,15 @@ +import nested_python_src.sample +import logging +import pytest + + +@pytest.fixture(scope="function") +def get_hello_python() -> str: + return nested_python_src.sample.hello_python() + + +def test_sample(get_hello_python: str) -> None: + logging.info("Start Unit test.") + assert get_hello_python == "Hello Python", "The return value should be 'Hello Python'." + logging.info("This is Unit test done.") + diff --git a/nested_poetry_project/test/unit_test/subpkg_2/subpkg_2-1/subpkg_2-1-1/__init__.py b/nested_poetry_project/test/unit_test/subpkg_2/subpkg_2-1/subpkg_2-1-1/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/nested_poetry_project/test/unit_test/subpkg_2/subpkg_2-1/subpkg_2-1-1/sample_2-1-1_layer.py b/nested_poetry_project/test/unit_test/subpkg_2/subpkg_2-1/subpkg_2-1-1/sample_2-1-1_layer.py new file mode 100644 index 00000000..a4e8d5f8 --- /dev/null +++ b/nested_poetry_project/test/unit_test/subpkg_2/subpkg_2-1/subpkg_2-1-1/sample_2-1-1_layer.py @@ -0,0 +1,15 @@ +import nested_python_src.sample +import logging +import pytest + + +@pytest.fixture(scope="function") +def get_hello_python() -> str: + return nested_python_src.sample.hello_python() + + +def test_sample(get_hello_python: str) -> None: + logging.info("Start Unit test.") + assert get_hello_python == "Hello Python", "The return value should be 'Hello Python'." 
+ logging.info("This is Unit test done.") + diff --git a/nested_poetry_project/test/unit_test/subpkg_2/subpkg_2-1/subpkg_2-1-1/sample_2-1-1_layer_2.py b/nested_poetry_project/test/unit_test/subpkg_2/subpkg_2-1/subpkg_2-1-1/sample_2-1-1_layer_2.py new file mode 100644 index 00000000..a4e8d5f8 --- /dev/null +++ b/nested_poetry_project/test/unit_test/subpkg_2/subpkg_2-1/subpkg_2-1-1/sample_2-1-1_layer_2.py @@ -0,0 +1,15 @@ +import nested_python_src.sample +import logging +import pytest + + +@pytest.fixture(scope="function") +def get_hello_python() -> str: + return nested_python_src.sample.hello_python() + + +def test_sample(get_hello_python: str) -> None: + logging.info("Start Unit test.") + assert get_hello_python == "Hello Python", "The return value should be 'Hello Python'." + logging.info("This is Unit test done.") +