Compare commits


25 Commits

SHA1        Date                        Message
7b806075c7  2024-12-08 11:26:22 +03:00  Merge pull request 'fix' (#29) from master into dev
2eb13d9443  2024-12-08 11:22:27 +03:00  Merge pull request 'master' (#28) from master into dev
49634845a1  2024-12-02 22:39:10 +03:00  Merge pull request 'Update daemons/mailbox.py' (#27) from master into dev
e02646a1a2  2024-12-02 21:57:09 +03:00  Merge pull request 'fix' (#26) from master into dev
1b7e8686e4  2024-11-30 13:30:19 +03:00  Merge pull request 'fix' (#24) from master into dev
cf12a0dca8  2024-11-30 12:59:01 +03:00  Merge pull request 'types' (#23) from master into dev
0a97e56539  2024-11-29 20:31:45 +03:00  Merge pull request 'fix' (#21) from master into dev
a5d3bd08c1  2024-11-29 20:25:38 +03:00  Merge pull request 'fix' (#20) from master into dev
ad74b8de7a  2024-11-28 23:09:59 +03:00  Merge pull request 'fix' (#18) from master into dev
05d9cdc7b1  2024-11-28 22:58:47 +03:00  Merge pull request 'fix' (#17) from master into dev
a209246513  2024-11-27 16:18:17 +03:00  Merge pull request 'fix' (#15) from master into dev
0922b5a4a4  2024-11-27 16:14:35 +03:00  Merge pull request 'fix' (#14) from master into dev
2ee74e70ac  2024-11-27 14:46:14 +03:00  Merge pull request 'fix' (#13) from master into dev
da93092232  2024-11-27 11:20:53 +03:00  Merge pull request 'fix' (#12) from master into dev
60b933496f  2024-11-27 11:15:30 +03:00  Merge pull request 'fix' (#11) from master into dev
82b99ae803  2024-11-27 11:12:37 +03:00  Merge pull request 'fix' (#10) from master into dev
c8f65a0ebb  2024-11-27 11:10:47 +03:00  Merge pull request 'fix' (#9) from master into dev
6401a40f11  2024-11-27 04:32:35 +03:00  Merge pull request 'fix' (#8) from master into dev
32197fd699  2024-11-27 04:26:41 +03:00  Merge pull request 'fix' (#7) from master into dev
e69ee8767a  2024-11-27 04:19:26 +03:00  Merge pull request 'fix' (#6) from master into dev
54f7581657  2024-11-27 04:14:58 +03:00  Merge pull request 'fix' (#5) from master into dev
c6a2710087  2024-11-27 04:11:15 +03:00  Merge pull request 'fix' (#4) from master into dev
42fc5552ab  2024-11-27 04:09:16 +03:00  Merge pull request 'fix' (#3) from master into dev
499eed49e0  2024-11-27 04:07:49 +03:00  Merge pull request 'fix' (#2) from master into dev
349df7eb17  2024-11-27 04:06:16 +03:00  Merge pull request 'req' (#1) from master into dev
10 changed files with 115 additions and 87 deletions


@@ -22,6 +22,7 @@ services:
     networks:
       - configurator
       - queues-development
+      - locks-development
     environment:
       STAGE: "development"
     command: mailbox
@@ -38,3 +39,5 @@ networks:
     external: true
   queues-development:
     external: true
+  locks-development:
+    external: true


@@ -22,6 +22,7 @@ services:
     networks:
      - configurator
      - queues
+     - locks
     environment:
       STAGE: "production"
     command: mailbox
@@ -38,3 +39,5 @@ networks:
     external: true
   queues:
     external: true
+  locks:
+    external: true

.gitignore (1 line changed)

@@ -121,4 +121,3 @@ GitHub.sublime-settings
 local_platform.json
 *pb2*
-schemas


@@ -4,5 +4,6 @@ WORKDIR /usr/src/app
 COPY requirements.txt requirements.txt
 RUN pip install -r requirements.txt
 COPY . .
+RUN make gen
 ENV PYTHONUNBUFFERED 1
 ENTRYPOINT ["python", "main.py"]

Makefile (new file, 2 lines)

@@ -0,0 +1,2 @@
gen:
	python -m grpc_tools.protoc --proto_path schemas --python_out=. --pyi_out=. --grpc_python_out=. ./schemas/tasks.proto
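Running make gen (as the Dockerfile above does at build time) should leave the generated modules tasks_pb2.py, tasks_pb2.pyi and tasks_pb2_grpc.py at the repository root, since that is how grpcio-tools names its outputs for schemas/tasks.proto. A minimal sketch of wiring up the generated stub, assuming the local address used in daemons/base.py below:

import grpc

import tasks_pb2_grpc

# Sketch only: assumes make gen has been run and the queues service is
# reachable on the local default address from daemons/base.py.
channel = grpc.insecure_channel('localhost:50051')
stub = tasks_pb2_grpc.TasksStub(channel)  # exposes Put, Take and Finish per schemas/tasks.proto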


@@ -1,3 +1,18 @@
+import os
+import grpc
+import tasks_pb2_grpc
+
+stage = os.getenv("STAGE", 'local')
+if stage == 'local':
+    QUEUES_URL = 'localhost:50051'
+else:
+    QUEUES_URL = 'queues-grpc:50051'
+
+
 class Daemon:
+    def __init__(self):
+        self.channel = grpc.insecure_channel(QUEUES_URL)
+        self.stub = tasks_pb2_grpc.TasksStub(channel=self.channel)
+
     def execute(self):
         raise NotImplemented
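With this change every daemon built on base.Daemon inherits a shared gRPC channel and Tasks stub. A hypothetical subclass, shown only to illustrate how the stub is meant to be reached (EchoDaemon and the 'echo' queue are made up for this sketch):

import tasks_pb2

from daemons import base


class EchoDaemon(base.Daemon):
    def execute(self):
        # self.stub is created in base.Daemon.__init__ and points at QUEUES_URL.
        response = self.stub.Take(tasks_pb2.TakeRequest(queue='echo'))
        if response.HasField('task'):
            print(response.task.payload)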


@@ -1,7 +1,6 @@
 import telebot
-import multiprocessing
+import threading
 import time
-import json
 
 from daemons import base
 from utils import platform
@@ -10,37 +9,45 @@ from utils import queues
 
 class Daemon(base.Daemon):
     def __init__(self):
-        self.processes: dict[str, multiprocessing.Process|None] = {}
+        self.telegram_bots: dict[str, dict[str, telebot.TeleBot|None]] = {}
+        self.threads: dict[str, dict[str, threading.Thread|None]] = {}
 
     def execute(self):
         while True:
             bots = platform.platform_client.get_config('bots')
             for project_name, project in bots.items():
+                if project_name not in self.telegram_bots:
+                    self.telegram_bots[project_name] = {}
+                    self.threads[project_name] = {}
                 for bot_name, bot_info in project.items():
-                    key = f'{project_name}_{bot_name}'
-                    proc = self.processes.get(key)
+                    if bot_name not in self.telegram_bots[project_name]:
+                        self.telegram_bots[project_name][bot_name] = None
+                        self.threads[project_name][bot_name] = None
+                    bot = self.telegram_bots[project_name][bot_name]
                     if bot_info.get('poll_enabled'):
-                        if proc and proc.is_alive():
+                        if bot is not None and self.threads[project_name][bot_name].is_alive():
                             print(f'process for {project_name} {bot_name} is alive')
                             continue
                         print(f'starting process for {project_name} {bot_name}')
-                        process = multiprocessing.Process(target=self.start_polling, args=(bot_info['secrets']['telegram_token'], bot_info['queue']))
-                        process.start()
-                        self.processes[key] = process
+                        bot = telebot.TeleBot(bot_info['secrets']['telegram_token'])
+                        thread = self.start_polling(bot, bot_info['queue'])
+                        self.telegram_bots[project_name][bot_name] = bot
+                        self.threads[project_name][bot_name] = thread
                         print(f'started process for {project_name} {bot_name}')
                     else:
-                        if proc is None:
+                        if bot is None:
                             print(f'process for {project_name} {bot_name} is not alive')
                             continue
                         print(f'terminating process for {project_name} {bot_name}')
-                        proc.terminate()
-                        self.processes[key] = None
+                        bot.stop_bot()
+                        self.telegram_bots[project_name][bot_name] = None
                         print(f'terminated process for {project_name} {bot_name}')
             time.sleep(10)
 
-    def start_polling(self, token: str, queue: str):
-        bot = telebot.TeleBot(token)
+    def start_polling(self, bot: telebot.TeleBot, queue: str) -> threading.Thread:
         @bot.message_handler(content_types=['audio', 'photo', 'voice', 'video', 'document', 'animation', 'text', 'location', 'contact', 'sticker', 'video_note'])
         def do_action(message: telebot.types.Message):
-            queues.set_task(queue, message.json, 1)
-        bot.polling()
+            queues.set_task(self.stub, queue, message.json, 1)
+        thread = threading.Thread(target=bot.polling)
+        thread.start()
+        return thread
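The rewrite replaces one process per bot with one polling thread per bot; keeping the TeleBot instance around lets the daemon ask it to stop via stop_bot() instead of terminating a process. Reduced to a standalone sketch (the token is a placeholder):

import threading

import telebot

bot = telebot.TeleBot('PLACEHOLDER_TOKEN')
thread = threading.Thread(target=bot.polling)
thread.start()
# ... later, when this bot should no longer poll:
bot.stop_bot()  # signals the polling loop to exit
thread.join()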


@@ -1,10 +1,14 @@
 annotated-types==0.7.0
-certifi==2024.12.14
+certifi==2024.8.30
 charset-normalizer==3.4.0
+grpcio==1.68.1
+grpcio-tools==1.68.1
 idna==3.10
-pydantic==2.10.4
-pydantic_core==2.27.2
+protobuf==5.29.1
+pydantic==2.10.2
+pydantic_core==2.27.1
 pyTelegramBotAPI==4.1.1
 requests==2.32.3
+setuptools==75.6.0
 typing_extensions==4.12.2
-urllib3==2.3.0
+urllib3==2.2.3

schemas/tasks.proto (new file, 40 lines)

@@ -0,0 +1,40 @@
syntax = "proto3";

package queues;

import "google/protobuf/struct.proto";

service Tasks {
    rpc Put (PutRequest) returns (EmptyResponse) {}
    rpc Take (TakeRequest) returns (TakeResponse) {}
    rpc Finish (FinishRequest) returns (EmptyResponse) {}
}

message Task {
    string id = 1;
    int64 attempt = 2;
    google.protobuf.Struct payload = 3;
}

message PutRequest {
    string queue = 1;
    int64 seconds_to_execute = 2;
    optional int64 delay = 3;
    google.protobuf.Struct payload = 4;
}

message TakeRequest {
    string queue = 1;
}

message FinishRequest {
    string id = 1;
}

message EmptyResponse {}

message TakeResponse {
    optional Task task = 1;
}
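Both payload fields are google.protobuf.Struct values, so plain dicts have to be wrapped before being sent. A rough client-side round trip against this service, assuming the local address from daemons/base.py and an invented queue name:

import grpc
from google.protobuf.struct_pb2 import Struct

import tasks_pb2
import tasks_pb2_grpc

channel = grpc.insecure_channel('localhost:50051')  # assumed local address
stub = tasks_pb2_grpc.TasksStub(channel)

payload = Struct()
payload.update({'chat_id': 1, 'text': 'hello'})  # wrap a plain dict into a Struct

stub.Put(tasks_pb2.PutRequest(queue='example', seconds_to_execute=30, payload=payload))

response = stub.Take(tasks_pb2.TakeRequest(queue='example'))
if response.HasField('task'):
    stub.Finish(tasks_pb2.FinishRequest(id=response.task.id))  # acknowledge the task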


@@ -1,19 +1,6 @@
-from concurrent.futures import ThreadPoolExecutor
-import datetime
-import json
-import os
-import traceback
-import uuid
-import zoneinfo
-
-import requests
 import time
+
+import tasks_pb2_grpc
+import tasks_pb2
-
-stage = os.getenv("STAGE", 'local')
-if stage == 'local':
-    QUEUES_URL = 'http://localhost:1239'
-else:
-    QUEUES_URL = 'http://queues:1239'
 
 
 class QueuesException(Exception):
@@ -21,55 +8,22 @@ class QueuesException(Exception):
 
 class TasksHandlerMixin:
-    def __init__(self, *args, **kwargs):
-        super().__init__(*args, **kwargs)
-        self.executor = ThreadPoolExecutor(max_workers=1)
-
-    def _send_metric(self, start: datetime.datetime, end: datetime.datetime, success: bool):
-        def send():
-            requests.post(f'{QUEUES_URL}/api/v1/metric', json={
-                'service': 'botalka',
-                'queue': self.queue_name,
-                'success': success,
-                'timestamp': start.strftime("%Y-%m-%dT%H:%M:%S") + "Z",
-                "success": success,
-                "execution_time_ms": (end - start).microseconds // 1000,
-                "environment": stage,
-            })
-        self.executor.submit(send)
-
     def poll(self):
         while True:
-            try:
-                response = requests.get(f'{QUEUES_URL}/api/v1/take', headers={'queue': self.queue_name}).json()
-            except requests.JSONDecodeError:
-                print('Unable to decode json')
-                time.sleep(3)
-                continue
-            task = response.get('task')
+            response: tasks_pb2.TakeResponse = self.stub.Take(tasks_pb2.TakeRequest(queue=self.queue_name))
+            task: tasks_pb2.Task = response.task
             if not task:
                 time.sleep(0.2)
                 continue
-            start = datetime.datetime.now(zoneinfo.ZoneInfo("Europe/Moscow"))
             try:
-                print(f'process task with id {task["id"]}, attempt {task["attempt"]}')
-                self.process(task['payload'])
-                success = True
+                self.process(task.payload)
             except Exception as exc:
                 print(f'Error processing message id={task["id"]}, payload={task["payload"]}, exc={exc}')
-                traceback.print_stack()
-                success = False
-            end = datetime.datetime.now(zoneinfo.ZoneInfo("Europe/Moscow"))
-            if success:
-                try:
-                    resp = requests.post(f'{QUEUES_URL}/api/v1/finish', json={'id': task['id']})
-                    if resp.status_code != 202:
-                        raise QueuesException
-                    print(f'finish task with id {task["id"]}')
-                except:
-                    print(f'Failed to finish task id={task["id"]}')
-            self._send_metric(start, end, success)
+                continue
+            try:
+                self.stub.Finish(tasks_pb2.FinishRequest(id=task.id))
+            except:
+                print(f'Failed to finish task id={task.id}')
 
     @property
     def queue_name(self):
@@ -78,12 +32,12 @@ class TasksHandlerMixin:
     def process(self, payload):
         raise NotImplemented
 
-def set_task(queue_name: str, payload: dict, seconds_to_execute: int, delay: int|None = None):
-    resp = requests.post(f'{QUEUES_URL}/api/v1/put', headers={'queue': queue_name}, json={
-        'payload': payload,
-        'seconds_to_execute': seconds_to_execute,
-        'delay': delay,
-    })
-    if resp.status_code != 202:
-        raise QueuesException
+def set_task(stub: tasks_pb2_grpc.TasksStub, queue_name: str, payload: dict, seconds_to_execute: int, delay: int|None = None):
+    stub.Put(
+        tasks_pb2.PutRequest(
            queue=queue_name,
            seconds_to_execute=seconds_to_execute,
            delay=delay,
            payload=payload
+        )
+    )
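For context, a consumer is expected to mix TasksHandlerMixin into a daemon that already owns a stub (base.Daemon after this change), override queue_name and process, and call poll(). A hypothetical consumer, with an invented class and queue name:

from daemons import base
from utils import queues


class ExampleHandler(queues.TasksHandlerMixin, base.Daemon):
    @property
    def queue_name(self):
        return 'example'  # placeholder queue name

    def process(self, payload):
        # payload arrives as a google.protobuf.Struct
        print('got task payload:', payload)


ExampleHandler().poll()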