Binary file added .coverage
Binary file not shown.
2 changes: 2 additions & 0 deletions .flake8
@@ -1,3 +1,5 @@
[flake8]
ignore = E501, W503
max-line-length = 120

exclude = examples/grpc/*_pb2*.py
40 changes: 20 additions & 20 deletions .github/workflows/python-package.yml
@@ -16,25 +16,25 @@ jobs:
strategy:
fail-fast: false
matrix:
python-version: ["3.8", "3.9", "3.10"]
python-version: [ "3.9", "3.10", "3.11", "3.12", "3.13" ]

steps:
- uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v3
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip
if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
- name: Lint with flake8
run: |
# stop the build if there are Python syntax errors or undefined names
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
- name: Test with pytest
run: |
pytest tests
- uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v3
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip
if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
- name: Lint with flake8
run: |
# stop the build if there are Python syntax errors or undefined names
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
- name: Test with pytest
run: |
pytest tests
6 changes: 3 additions & 3 deletions .pre-commit-config.yaml
@@ -15,7 +15,7 @@ fail_fast: false

repos:
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.11.2
rev: v1.15.0
hooks:
- id: mypy # Run mypy type checker; info: runs with flag --missing-imports
args: [ --config-file=mypy.ini ]
@@ -44,7 +44,7 @@ repos:
^.*_pb2*
)
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.6.0
rev: v5.0.0
hooks:
- id: check-added-large-files # Prevent giant files from being committed
args: [ --maxkb=1024 ]
@@ -67,7 +67,7 @@ repos:
files: 'requirements/requirements.*\.txt$'

- repo: https://github.com/pycqa/flake8
rev: 7.1.1
rev: 7.1.2
hooks:
- id: flake8

8 changes: 8 additions & 0 deletions RELEASE.md
@@ -4,6 +4,14 @@

*****************

## Release ONDEWO LOGGING PYTHON 3.5.0

### Improvements

* Added AsyncTimer and async logger #21

*****************
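
The release note above refers to the new AsyncTimer and async logger (#21), whose implementation is not shown in this diff. As a rough, hedged sketch only — the decorator name, import path, and log format below are assumptions, not the package's actual API — an async timing decorator of this kind can be written as:

```python
# Hedged sketch of an async timing decorator in the spirit of the new AsyncTimer.
# This is NOT the package's implementation; names and behaviour are assumptions.
import asyncio
import functools
import logging
import time

logger = logging.getLogger("timing_example")


def async_timer(func):
    """Log the wall-clock duration of an awaited coroutine call."""
    @functools.wraps(func)
    async def wrapper(*args, **kwargs):
        start = time.perf_counter()
        try:
            return await func(*args, **kwargs)
        finally:
            logger.info("%s took %.4f s", func.__name__, time.perf_counter() - start)
    return wrapper


@async_timer
async def do_work() -> str:
    await asyncio.sleep(0.1)  # stand-in for real async work
    return "done"


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    asyncio.run(do_work())
```

The real AsyncTimer presumably routes its messages through the package's configured loggers rather than a bare `logging.getLogger`; consult the package source for the actual signature.

*****************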

## Release ONDEWO LOGGING PYTHON 3.4.0

### Improvements
5 changes: 5 additions & 0 deletions envs/local.env
@@ -0,0 +1,5 @@
# without fluentd
ONDEWO_LOGGING_CONFIG_FILE=ondewo/logging/config/logging-no-fluentd.yaml

# with fluentd
# ONDEWO_LOGGING_CONFIG_FILE=ondewo/logging/config/logging.yaml
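
For context: a logging YAML selected via an environment variable like this is typically consumed with Python's standard `logging.config` machinery. The sketch below is illustrative only — it assumes the referenced file follows the `dictConfig` schema and that PyYAML is installed, which this diff does not confirm — and is not the library's internal loader.

```python
# Hedged sketch: load the YAML logging config referenced by ONDEWO_LOGGING_CONFIG_FILE.
# Assumes the file follows the logging.config dictConfig schema (an assumption, not
# something this diff guarantees) and that PyYAML is available.
import logging.config
import os

import yaml

config_path = os.environ.get(
    "ONDEWO_LOGGING_CONFIG_FILE",
    "ondewo/logging/config/logging-no-fluentd.yaml",
)

with open(config_path) as f:
    config = yaml.safe_load(f)

logging.config.dictConfig(config)
logging.getLogger(__name__).info("Logging configured from %s", config_path)
```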
13 changes: 13 additions & 0 deletions examples/__init__.py
@@ -0,0 +1,13 @@
# Copyright 2021-2024 ONDEWO GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
13 changes: 13 additions & 0 deletions examples/grpc/__init__.py
@@ -0,0 +1,13 @@
# Copyright 2021-2024 ONDEWO GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
113 changes: 113 additions & 0 deletions examples/grpc/grpc_client.py
@@ -0,0 +1,113 @@
# Copyright 2019 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""An example of multiprocessing concurrency with gRPC."""

from __future__ import (
absolute_import,
division,
print_function,
)

import atexit
import logging
import multiprocessing
import operator
import sys
import time

import grpc # type: ignore[import]

import prime_pb2 # type: ignore[import]
import prime_pb2_grpc # type: ignore[import]

_PROCESS_COUNT = 20
_MAXIMUM_CANDIDATE = 10000

# Each worker process initializes a single channel after forking.
# It's regrettable, but to ensure that each subprocess only has to instantiate
# a single channel to be reused across all RPCs, we use globals.
_worker_channel_singleton = None
_worker_stub_singleton = None

_LOGGER = logging.getLogger(__name__)


def _shutdown_worker():
_LOGGER.info("Shutting worker process down.")
if _worker_channel_singleton is not None:
_worker_channel_singleton.close()


def _initialize_worker(server_address):
global _worker_channel_singleton # pylint: disable=global-statement
global _worker_stub_singleton # pylint: disable=global-statement
# _LOGGER.info("Initializing worker process.")
_worker_channel_singleton = grpc.insecure_channel(server_address)
_worker_stub_singleton = prime_pb2_grpc.PrimeCheckerStub(
_worker_channel_singleton
)
atexit.register(_shutdown_worker)


def _run_worker_query(primality_candidate):
# _LOGGER.info("Checking primality of %s.", primality_candidate)
result = _worker_stub_singleton.check(
prime_pb2.PrimeCandidate(candidate=primality_candidate)
)
# print(result)
return result


def _calculate_primes(server_address):
worker_pool = multiprocessing.Pool(
processes=_PROCESS_COUNT,
initializer=_initialize_worker,
initargs=(server_address,),
)
check_range = range(2, _MAXIMUM_CANDIDATE)
primality = worker_pool.map(_run_worker_query, check_range)
primes = zip(check_range, map(operator.attrgetter("isPrime"), primality))
# _LOGGER.info(f"primes: {primes}")
return tuple(primes)


def main():
# msg:str = "Determine the primality of the first {} integers.".format( _MAXIMUM_CANDIDATE )
# parser = argparse.ArgumentParser(description=msg)
# parser.add_argument(
# "server_address",
# help="The address of the server (e.g. localhost:51317)",
# default="localhost:35065"
# )
# args = parser.parse_args()
# primes = _calculate_primes(args.server_address)

start_main = time.time()
for _ in range(17): # Replace repeated calls with a loop
start_call = time.time()
primes = _calculate_primes("localhost:51317")
end_call = time.time()
print(f"Execution time for _calculate_primes: {end_call - start_call:.5f} seconds")
end_main = time.time()
print(primes)
print(f"Total execution time for main: {end_main - start_main:.5f} seconds")


if __name__ == "__main__":
handler = logging.StreamHandler(sys.stdout)
formatter = logging.Formatter("[PID %(process)d] %(message)s")
handler.setFormatter(formatter)
_LOGGER.addHandler(handler)
_LOGGER.setLevel(logging.INFO)
main()
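
The client above expects generated modules `prime_pb2` / `prime_pb2_grpc` and a PrimeChecker service reachable on `localhost:51317`; neither the `.proto` file nor a server appears in this diff. The following is a hedged sketch of a compatible server, modelled on the upstream gRPC multiprocessing example — the message name `Primality` and the servicer class names follow grpcio's usual code-generation conventions and are assumptions here.

```python
# Hedged sketch of a PrimeChecker server compatible with the client above.
# Assumes prime_pb2/prime_pb2_grpc were generated from a proto defining
# PrimeCandidate{candidate} and Primality{isPrime}; those names mirror the
# upstream gRPC multiprocessing example and are not confirmed by this diff.
from concurrent import futures

import grpc  # type: ignore[import]

import prime_pb2  # type: ignore[import]
import prime_pb2_grpc  # type: ignore[import]


def _is_prime(n: int) -> bool:
    """Trial-division primality check, sufficient for the example's small candidates."""
    if n < 2:
        return False
    i = 2
    while i * i <= n:
        if n % i == 0:
            return False
        i += 1
    return True


class PrimeCheckerServicer(prime_pb2_grpc.PrimeCheckerServicer):
    def check(self, request, context):
        return prime_pb2.Primality(isPrime=_is_prime(request.candidate))


def serve(bind_address: str = "localhost:51317") -> None:
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=16))
    prime_pb2_grpc.add_PrimeCheckerServicer_to_server(PrimeCheckerServicer(), server)
    server.add_insecure_port(bind_address)
    server.start()
    server.wait_for_termination()


if __name__ == "__main__":
    serve()
```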