Compare commits


24 Commits

SHA1  Message  Date
90e194fa50 Merge pull request 'fix' (#38) from master into dev
Reviewed-on: #38
2024-12-08 22:06:48 +03:00
28cc1bfe1d Merge pull request 'master' (#37) from master into dev
Reviewed-on: #37
2024-12-08 21:12:59 +03:00
4e42f0927e Merge pull request 'master' (#34) from master into dev
Reviewed-on: #34
2024-12-03 14:05:52 +03:00
289701cbc0 Merge pull request 'fix' (#31) from master into dev
Reviewed-on: #31
2024-11-27 18:42:03 +03:00
e1dbfb9e51 Merge pull request 'fix' (#30) from master into dev
Reviewed-on: #30
2024-11-27 16:20:25 +03:00
b23ab7b97f Merge pull request 'send' (#29) from master into dev
Reviewed-on: #29
2024-11-27 16:02:25 +03:00
09857b47b7 Merge pull request 'fix' (#27) from master into dev
Reviewed-on: #27
2024-11-27 02:36:32 +03:00
99ceb4c716 Merge pull request 'fix' (#26) from master into dev
Reviewed-on: #26
2024-11-27 02:33:29 +03:00
3d195df1a7 Merge pull request 'fix' (#25) from master into dev
Reviewed-on: #25
2024-11-27 02:21:08 +03:00
0e836b1d92 Merge pull request 'fix' (#24) from master into dev
Reviewed-on: #24
2024-11-27 02:17:49 +03:00
ff9fe98a3e Merge pull request 'fix' (#22) from master into dev
Reviewed-on: #22
2024-11-24 18:10:48 +03:00
2cc67efebb Merge pull request 'configurator' (#21) from master into dev
Reviewed-on: #21
2024-11-24 18:09:11 +03:00
50d408554e Merge pull request 'master' (#20) from master into dev
Reviewed-on: #20
2024-11-23 22:38:29 +03:00
589eb70f69 Merge pull request 'fix' (#17) from queues into dev
Reviewed-on: #17
2024-11-22 01:32:48 +03:00
5c35987d15 Merge pull request 'fix' (#14) from queues into dev
Reviewed-on: #14
2024-11-17 13:15:40 +03:00
09f2a267b2 Merge pull request 'fix' (#13) from queues into dev
Reviewed-on: #13
2024-11-17 13:12:16 +03:00
5042df4874 Merge pull request 'fix' (#12) from queues into dev
Reviewed-on: #12
2024-11-17 13:09:31 +03:00
7fd853f300 Merge pull request 'fix' (#11) from queues into dev
Reviewed-on: #11
2024-11-17 13:06:04 +03:00
6a0471dc15 Merge pull request 'add' (#10) from queues into dev
Reviewed-on: #10
2024-11-17 13:01:49 +03:00
cc290465a1 Merge pull request 'queues' (#9) from queues into dev
Reviewed-on: #9
2024-11-17 12:58:54 +03:00
1d0abfed03 Merge pull request 'fix' (#6) from master into dev
Reviewed-on: #6
2024-10-11 06:50:59 +03:00
e17c5ab7fe Merge pull request 'master' (#5) from master into dev
Reviewed-on: #5
2024-10-11 06:49:45 +03:00
ff7f0ffa91 Merge pull request 'fix' (#3) from master into dev
Reviewed-on: #3
2024-10-11 06:13:49 +03:00
71a12ef77b Merge pull request 'fix' (#2) from master into dev
Reviewed-on: #2
2024-10-11 05:46:11 +03:00
8 changed files with 42 additions and 78 deletions

.DS_Store (vendored binary file; contents not shown)


@@ -6,13 +6,12 @@ services:
     image: mathwave/sprint-repo:pizda-bot
     command: worker
     environment:
-      MONGO_HOST: "mongo"
+      MONGO_HOST: "mongo.develop.sprinthub.ru"
       MONGO_PASSWORD: $MONGO_PASSWORD_DEV
       STAGE: "development"
     networks:
       - queues-development
       - configurator
-      - mongo-development
     deploy:
       mode: replicated
       restart_policy:
@@ -25,12 +24,11 @@ services:
     image: mathwave/sprint-repo:pizda-bot
     command: api
     environment:
-      MONGO_HOST: "mongo"
+      MONGO_HOST: "mongo.develop.sprinthub.ru"
       MONGO_PASSWORD: $MONGO_PASSWORD_DEV
       STAGE: "development"
     networks:
       - common-infra-nginx-development
-      - mongo-development
     deploy:
       mode: replicated
       restart_policy:
@@ -46,5 +44,3 @@ networks:
     external: true
   configurator:
     external: true
-  mongo-development:
-    external: true
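
This appears to be the development compose file: MONGO_HOST switches from the in-cluster alias "mongo" to the externally resolvable hostname "mongo.develop.sprinthub.ru", so the worker and api no longer need to join the mongo-development overlay network, and both the attachment and the external network declaration are dropped. A minimal sketch of how the services presumably consume these variables, assuming pymongo; the port, username and database are not shown in the diff and are placeholders:

# Illustrative sketch, not part of this diff: building a Mongo connection from
# the environment that docker-compose injects. Port and username are assumptions.
import os

from pymongo import MongoClient

mongo_host = os.environ["MONGO_HOST"]          # "mongo.develop.sprinthub.ru" in development
mongo_password = os.environ["MONGO_PASSWORD"]  # $MONGO_PASSWORD_DEV / $MONGO_PASSWORD_PROD

client = MongoClient(
    host=mongo_host,
    port=27017,              # assumed default MongoDB port
    username="root",         # assumed; not visible in the compose files
    password=mongo_password,
)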


@@ -6,13 +6,12 @@ services:
     image: mathwave/sprint-repo:pizda-bot
     command: worker
     environment:
-      MONGO_HOST: "mongo"
+      MONGO_HOST: "mongo.sprinthub.ru"
       MONGO_PASSWORD: $MONGO_PASSWORD_PROD
       STAGE: "production"
     networks:
       - queues
       - configurator
-      - mongo
     deploy:
       mode: replicated
       restart_policy:
@@ -25,11 +24,10 @@ services:
     image: mathwave/sprint-repo:pizda-bot
     command: api
     environment:
-      MONGO_HOST: "mongo"
+      MONGO_HOST: "mongo.sprinthub.ru"
       MONGO_PASSWORD: $MONGO_PASSWORD_PROD
     networks:
       - common-infra-nginx
-      - mongo
     deploy:
       mode: replicated
       restart_policy:
@@ -45,5 +43,3 @@ networks:
     external: true
   configurator:
     external: true
-  mongo:
-    external: true
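
The production compose file gets the same treatment: MONGO_HOST becomes "mongo.sprinthub.ru", the password comes from $MONGO_PASSWORD_PROD, and the mongo network attachment and declaration are removed (note the api service here sets no STAGE variable, unlike its development counterpart). Elsewhere in the repo STAGE is what selects environment-specific endpoints; purely as an illustration of that convention applied to the hosts above, since the real services read MONGO_HOST directly from the environment:

# Illustration only: the repo's STAGE convention (see the base daemon below)
# applied to the Mongo hosts from these compose files. The services themselves
# read MONGO_HOST from the environment, so this mapping is an assumption.
import os

stage = os.getenv("STAGE", "local")

MONGO_HOSTS = {
    "development": "mongo.develop.sprinthub.ru",
    "production": "mongo.sprinthub.ru",
}
mongo_host = MONGO_HOSTS.get(stage, "localhost")  # "localhost" assumed for local runs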


@@ -4,5 +4,6 @@ WORKDIR /usr/src/app
 COPY requirements.txt requirements.txt
 RUN pip install -r requirements.txt
 COPY . .
+RUN make gen
 ENV PYTHONUNBUFFERED 1
 ENTRYPOINT ["python", "main.py"]

Makefile (new file, +7 lines)

@@ -0,0 +1,7 @@
+gen:
+	pip install grpcio grpcio-tools
+	curl https://platform.sprinthub.ru/generator >> generator.py
+	python generator.py
+	rm generator.py
+run:
+	python ./server.py
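
The Dockerfile's new RUN make gen invokes this gen target at image build time: it installs grpcio/grpcio-tools, downloads a generator script from platform.sprinthub.ru, runs it and deletes it, which is presumably what produces the queues.tasks_pb2 and queues.tasks_pb2_grpc modules imported below, so the stubs exist in the image before main.py starts. (The recipe appends with >> rather than overwriting generator.py, which only matters if the file already exists.) The generator itself is not part of this diff; a hypothetical sketch of what it might do, assuming it fetches a tasks.proto and drives protoc through grpc_tools:

# Hypothetical sketch of generator.py; the real script is downloaded from
# https://platform.sprinthub.ru/generator and is not shown here. The proto
# URL, file name and "queues" output package are assumptions inferred from
# the imports used elsewhere in this diff.
import os
import urllib.request

from grpc_tools import protoc

os.makedirs("queues", exist_ok=True)
urllib.request.urlretrieve("https://platform.sprinthub.ru/tasks.proto", "queues/tasks.proto")

# Emits queues/tasks_pb2.py and queues/tasks_pb2_grpc.py next to the proto.
exit_code = protoc.main([
    "grpc_tools.protoc",
    "-I.",
    "--python_out=.",
    "--grpc_python_out=.",
    "queues/tasks.proto",
])
assert exit_code == 0, "protoc failed"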


@@ -1,4 +1,6 @@
 import os
+import grpc
+from queues import tasks_pb2_grpc

 stage = os.getenv("STAGE", 'local')
@@ -9,5 +11,8 @@ else:
 class Daemon:
+    def __init__(self):
+        self.channel = grpc.insecure_channel(QUEUES_URL)
+        self.stub = tasks_pb2_grpc.TasksStub(channel=self.channel)
+
     def execute(self):
         raise NotImplemented
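
The base Daemon now opens an insecure gRPC channel to QUEUES_URL in its constructor and keeps a TasksStub on self.stub; the bot handler passes that stub into queues.set_task, and TasksHandlerMixin.poll drives Take/Finish through it. One thing worth checking: gRPC channel targets are normally plain host:port, so if QUEUES_URL still carries the http:// prefix it had in the old HTTP client (the else: branch around it is outside this hunk), grpc.insecure_channel may not resolve it as intended. A minimal sketch of a subclass using the shared stub; the class name, the payload keys other than 'project', and seconds_to_execute are assumptions:

# Minimal sketch, not repo code: a Daemon subclass reusing the stub created in
# base.Daemon.__init__. Only the queue name and the 'project' key come from
# the handler diff; everything else is assumed.
import base
import queues


class PingDaemon(base.Daemon):
    def execute(self):
        queues.set_task(
            self.stub,                                  # TasksStub from base.Daemon.__init__
            'botalka_mailbox',
            {'project': 'pizda-bot', 'text': 'ping'},
            seconds_to_execute=30,                      # assumed value
        )


if __name__ == '__main__':
    PingDaemon().execute()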


@@ -35,10 +35,6 @@ def get_answers():
     return client.get_config('answers')

-def get_ignored_users():
-    return client.get_config('ignored_users')['users']
-
 def get_replies():
     return client.get_config('replies')
@@ -53,6 +49,7 @@ class Daemon(base.Daemon, queues.TasksHandlerMixin):
     def reply(self, text: str, chat_id: int, message_id: int):
         queues.set_task(
+            self.stub,
             'botalka_mailbox',
             {
                 'project': 'pizda-bot',
@@ -68,6 +65,7 @@ class Daemon(base.Daemon, queues.TasksHandlerMixin):
     def send(self, text: str, chat_id: int):
         queues.set_task(
+            self.stub,
             'botalka_mailbox',
             {
                 'project': 'pizda-bot',
@@ -155,6 +153,6 @@ class Daemon(base.Daemon, queues.TasksHandlerMixin):
             else:
                 return
             ans = get_answers().get(convert_text)
-            if ans is not None and randrange(1, 101) <= info["probability"] and message.from_user.id not in get_ignored_users():
+            if ans is not None and randrange(1, 101) <= info["probability"]:
                 self.reply(ans, message.chat.id, message.message_id)
                 mongo.inc(message.from_user.username, message.chat.id)
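
Two changes in the handler: reply() and send() now pass the daemon's gRPC stub as the first argument to queues.set_task, matching its new signature, and the reply gate no longer consults get_ignored_users() (whose definition is removed above), leaving only the probability check. Since randrange(1, 101) draws uniformly from 1 to 100 inclusive, the comparison gives an info["probability"]-percent chance of replying; a small standalone sketch of that gate:

# Standalone sketch of the reply gate, assuming probability is an integer
# percentage in 0..100 as the comparison implies.
from random import randrange


def should_reply(probability: int) -> bool:
    # randrange(1, 101) is uniform over 1..100, so this is True roughly
    # `probability` percent of the time.
    return randrange(1, 101) <= probability


hits = sum(should_reply(25) for _ in range(10_000))
print(f"replied to {hits} of 10000 messages (~2500 expected)")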


@@ -1,16 +1,8 @@
-from concurrent.futures import ThreadPoolExecutor
-import datetime
-import os
-import zoneinfo
-import requests
 import time
+from queues import tasks_pb2_grpc
+from queues import tasks_pb2
-stage = os.getenv("STAGE", 'local')
-if stage == 'local':
-    QUEUES_URL = 'http://localhost:1239'
-else:
-    QUEUES_URL = 'http://queues:1239'
+from google.protobuf import json_format

 class QueuesException(Exception):
@@ -18,54 +10,23 @@ class QueuesException(Exception):
 class TasksHandlerMixin:
-    def __init__(self, *args, **kwargs):
-        super().__init__(*args, **kwargs)
-        self.executor = ThreadPoolExecutor(max_workers=1)
-
-    def _send_metric(self, start: datetime.datetime, end: datetime.datetime, success: bool):
-        def send():
-            requests.post(f'{QUEUES_URL}/api/v1/metric', json={
-                'service': 'botalka',
-                'queue': self.queue_name,
-                'success': success,
-                'timestamp': start.strftime("%Y-%m-%dT%H:%M:%S") + "Z",
-                "success": success,
-                "execution_time_ms": (end - start).microseconds // 1000,
-                "environment": stage,
-            })
-        self.executor.submit(send)
-
     def poll(self):
         while True:
-            try:
-                response = requests.get(f'{QUEUES_URL}/api/v1/take', headers={'queue': self.queue_name}).json()
-            except requests.JSONDecodeError:
-                print('Unable to decode json')
-                time.sleep(3)
-                continue
-            task = response.get('task')
+            response: tasks_pb2.TakeResponse = self.stub.Take(tasks_pb2.TakeRequest(queue=self.queue_name))
+            task = response.task
             if not task:
                 time.sleep(0.2)
                 continue
-            start = datetime.datetime.now(zoneinfo.ZoneInfo("Europe/Moscow"))
             try:
-                print(f'process task with id {task["id"]}, attempt {task["attempt"]}')
-                self.process(task['payload'])
-                success = True
+                payload = json_format.MessageToDict(task.payload)
+                self.process(payload)
             except Exception as exc:
-                print(f'Error processing message id={task["id"]}, payload={task["payload"]}, exc={exc}')
-                success = False
-            end = datetime.datetime.now(zoneinfo.ZoneInfo("Europe/Moscow"))
-            if success:
-                try:
-                    resp = requests.post(f'{QUEUES_URL}/api/v1/finish', json={'id': task['id']})
-                    if resp.status_code != 202:
-                        raise QueuesException
-                    print(f'finish task with id {task["id"]}')
-                except:
-                    print(f'Failed to finish task id={task["id"]}')
-            self._send_metric(start, end, success)
+                print(f'Error processing message id={task.id}, payload={payload}, exc={exc}')
+                continue
+            try:
+                self.stub.Finish(tasks_pb2.FinishRequest(id=task.id))
+            except:
+                print(f'Failed to finish task id={task.id}')

     @property
     def queue_name(self):
@@ -74,12 +35,12 @@ class TasksHandlerMixin:
     def process(self, payload):
         raise NotImplemented

-def set_task(queue_name: str, payload: dict, seconds_to_execute: int, delay: int|None = None):
-    resp = requests.post(f'{QUEUES_URL}/api/v1/put', headers={'queue': queue_name}, json={
-        'payload': payload,
-        'seconds_to_execute': seconds_to_execute,
-        'delay': delay,
-    })
-    if resp.status_code != 202:
-        raise QueuesException
+def set_task(stub: tasks_pb2_grpc.TasksStub, queue_name: str, payload: dict, seconds_to_execute: int, delay: int|None = None):
+    stub.Put(
+        tasks_pb2.PutRequest(
+            queue=queue_name,
+            seconds_to_execute=seconds_to_execute,
+            delay=delay,
+            payload=payload
+        )
+    )
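
Overall, TasksHandlerMixin stops polling the HTTP API (GET /api/v1/take, POST /api/v1/finish and /api/v1/put, plus the metric POST sent through a ThreadPoolExecutor) and instead calls Take, Finish and Put on the gRPC TasksStub that the host class provides; task payloads arrive as protobuf messages and are converted to plain dicts with json_format.MessageToDict before process() sees them. Attempt logging, the finish status-code check and the per-task metric are dropped, and a failed process() now continues without calling Finish, so redelivery is presumably left to the queue server. An end-to-end usage sketch following the pattern of the handler's class Daemon(base.Daemon, queues.TasksHandlerMixin); the queue_name override, payload and seconds_to_execute are assumptions (the real queue_name property body is outside these hunks):

# Usage sketch based on the patterns visible in this diff; not repo code.
import base
import queues


class EchoWorker(base.Daemon, queues.TasksHandlerMixin):
    @property
    def queue_name(self):
        return 'botalka_mailbox'        # queue name taken from the handler diff

    def process(self, payload):
        # payload is a plain dict produced by json_format.MessageToDict
        print('got task for project', payload.get('project'))


if __name__ == '__main__':
    worker = EchoWorker()

    # Producer side: enqueue through the stub created by base.Daemon.__init__.
    queues.set_task(
        worker.stub,
        'botalka_mailbox',
        {'project': 'pizda-bot'},
        seconds_to_execute=30,          # assumed value
    )

    worker.poll()                       # consumer loop: Take -> process -> Finish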