Merge pull request #43 from PowerLoom/feat/epoch_size_1
Enhanced Pooler Efficiency for Smaller Epoch Sizes
xadahiya authored Aug 30, 2023
2 parents 56e058b + e8ee252 commit 20b4604
Showing 28 changed files with 727 additions and 405 deletions.
2 changes: 0 additions & 2 deletions Dockerfile
@@ -3,8 +3,6 @@ FROM nikolaik/python-nodejs:python3.10-nodejs18
# Install the PM2 process manager for Node.js
RUN npm install pm2 -g

RUN pm2 install pm2-logrotate && pm2 set pm2-logrotate:compress true && pm2 set pm2-logrotate:retain 7

# Copy the application's dependencies files
COPY poetry.lock pyproject.toml ./

Binary file added out/snapshotter/modules/pooler/flow/flow.png
6 changes: 6 additions & 0 deletions pm2.config.js
@@ -12,6 +12,8 @@ module.exports = {
script : `poetry run python -m snapshotter.launch_process_hub_core`,
max_restarts: MAX_RESTART,
min_uptime: MIN_UPTIME,
error_file: "/dev/null",
out_file: "/dev/null",
env: {
NODE_ENV: NODE_ENV,
}
@@ -21,6 +23,8 @@ module.exports = {
script : `poetry run python -m snapshotter.gunicorn_core_launcher`,
max_restarts: MAX_RESTART,
min_uptime: MIN_UPTIME,
error_file: "/dev/null",
out_file: "/dev/null",
env: {
NODE_ENV: NODE_ENV,
GUNICORN_WORKERS: 1,
@@ -31,6 +35,8 @@ module.exports = {
script : `poetry run python -m snapshotter.auth.gunicorn_auth_entry_launcher`,
max_restarts: MAX_RESTART,
min_uptime: MIN_UPTIME,
error_file: "/dev/null",
out_file: "/dev/null",
env: {
NODE_ENV: NODE_ENV,
GUNICORN_WORKERS: 1,
511 changes: 253 additions & 258 deletions poetry.lock

Large diffs are not rendered by default.

16 changes: 8 additions & 8 deletions pyproject.toml
@@ -8,25 +8,25 @@ readme = "README.md"

[tool.poetry.dependencies]
python = "^3.8"
redis = "^4.4.0"
redis = "^4.6.0"
timeago = "^1.0.16"
typer = "^0.7.0"
psutil = "^5.9.4"
psutil = "^5.9.5"
importlib = "^1.0.4"
pika = "^1.3.1"
pika = "^1.3.2"
gunicorn = "^20.1.0"
aiohttp = "^3.8.3"
web3 = "^5.31.3"
aiohttp = "^3.8.5"
web3 = "^5.31.4"
async-limits = {git = "https://github.com/powerloom/limits.git"}
tenacity = "^8.1.0"
tenacity = "^8.2.2"
limits = "^2.8.0"
uvicorn = "^0.20.0"
frozendict = "^2.3.4"
frozendict = "^2.3.8"
multiaddr = "^0.0.9"
idna = "^3.4"
uvloop = "^0.17.0"
loguru = "^0.7.0"
httpx = "^0.24.0"
httpx = "^0.24.1"
fastapi = "^0.95.1"
ifps-client = {git = "https://[email protected]/PowerLoom/py-ipfs-client.git"}
aiorwlock = "^1.3.0"
24 changes: 5 additions & 19 deletions snapshotter/auth/gunicorn_auth_entry_launcher.py
@@ -2,23 +2,21 @@
import os
import sys

from loguru import logger

from snapshotter.auth.conf import auth_settings
from snapshotter.auth.server_entry import app
from snapshotter.utils.default_logger import FORMAT
from snapshotter.utils.default_logger import logger
from snapshotter.utils.gunicorn import InterceptHandler
from snapshotter.utils.gunicorn import StandaloneApplication
from snapshotter.utils.gunicorn import StubbedGunicornLogger

LOG_LEVEL = logging.getLevelName(os.environ.get('LOG_LEVEL', 'DEBUG'))
JSON_LOGS = True if os.environ.get('JSON_LOGS', '0') == '1' else False
LOG_LEVEL = logging.getLevelName(os.environ.get('LOG_LEVEL', 'DEBUG'))
WORKERS = int(os.environ.get('GUNICORN_WORKERS', '5'))


if __name__ == '__main__':
intercept_handler = InterceptHandler()
# logging.basicConfig(handlers=[intercept_handler], level=LOG_LEVEL)
# logging.root.handlers = [intercept_handler]
logging.root.setLevel(LOG_LEVEL)

seen = set()
@@ -35,20 +33,8 @@
seen.add(name.split('.')[0])
logging.getLogger(name).handlers = [intercept_handler]

logger.configure(
handlers=[
{
'sink': sys.stdout,
'serialize': JSON_LOGS,
'level': logging.DEBUG,
},
{
'sink': sys.stderr,
'serialize': JSON_LOGS,
'level': logging.ERROR,
},
],
)
logger.add(sys.stdout, format=FORMAT, level=LOG_LEVEL, serialize=JSON_LOGS)
logger.add(sys.stderr, format=FORMAT, level=logging.ERROR, serialize=JSON_LOGS)

options = {
'bind': f'{auth_settings.bind.host}:{auth_settings.bind.port}',
24 changes: 5 additions & 19 deletions snapshotter/gunicorn_core_launcher.py
@@ -2,23 +2,21 @@
import os
import sys

from loguru import logger

from snapshotter.core_api import app
from snapshotter.settings.config import settings
from snapshotter.utils.default_logger import FORMAT
from snapshotter.utils.default_logger import logger
from snapshotter.utils.gunicorn import InterceptHandler
from snapshotter.utils.gunicorn import StandaloneApplication
from snapshotter.utils.gunicorn import StubbedGunicornLogger

LOG_LEVEL = logging.getLevelName(os.environ.get('LOG_LEVEL', 'DEBUG'))
JSON_LOGS = True if os.environ.get('JSON_LOGS', '0') == '1' else False
WORKERS = int(os.environ.get('GUNICORN_WORKERS', '5'))
JSON_LOGS = True if os.environ.get('JSON_LOGS', '0') == '1' else False


if __name__ == '__main__':
intercept_handler = InterceptHandler()
# logging.basicConfig(handlers=[intercept_handler], level=LOG_LEVEL)
# logging.root.handlers = [intercept_handler]
logging.root.setLevel(LOG_LEVEL)

seen = set()
@@ -35,20 +33,8 @@
seen.add(name.split('.')[0])
logging.getLogger(name).handlers = [intercept_handler]

logger.configure(
handlers=[
{
'sink': sys.stdout,
'serialize': JSON_LOGS,
'level': logging.DEBUG,
},
{
'sink': sys.stderr,
'serialize': JSON_LOGS,
'level': logging.ERROR,
},
],
)
logger.add(sys.stdout, format=FORMAT, level=LOG_LEVEL, serialize=JSON_LOGS)
logger.add(sys.stderr, format=FORMAT, level=logging.ERROR, serialize=JSON_LOGS)

options = {
'bind': f'{settings.core_api.host}:{settings.core_api.port}',
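
In both launcher scripts the logger.configure(...) block is replaced by two logger.add(...) sinks that reuse the shared logger and FORMAT exported by snapshotter.utils.default_logger. As a rough sketch (the format string and module contents below are assumptions, not the project's actual code), that shared module could look like:

# Hypothetical sketch of snapshotter/utils/default_logger.py; the FORMAT
# string is an assumption and only illustrates a typical loguru format.
import sys

from loguru import logger

FORMAT = (
    '{time:YYYY-MM-DD HH:mm:ss.SSS} | {level: <8} | '
    '{module}:{function}:{line} - {message}'
)

# Drop loguru's default stderr sink so each launcher can attach its own
# stdout/stderr sinks with this FORMAT and its chosen LOG_LEVEL / JSON_LOGS.
logger.remove()

Each launcher then attaches a stdout sink at LOG_LEVEL and a stderr sink at ERROR, both using FORMAT and the JSON_LOGS toggle, as the diffs above show.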
1 change: 1 addition & 0 deletions snapshotter/init_rabbitmq.py
@@ -1,3 +1,4 @@
import aio_pika
import pika

from snapshotter.settings.config import settings
23 changes: 23 additions & 0 deletions snapshotter/modules/pooler/flow.puml
@@ -0,0 +1,23 @@
@startuml

AggregateProcessor -> ProtocolStateContractorRedis : Get project first epoch

alt calculating aggregate for the first time
AggregateProcessor -> ProtocolStateContractorRedis : Get required base snapshots and calculate aggregate from scratch
else calculating aggregate other than the first time
AggregateProcessor -> ProtocolStateContractorRedis: Get last Finalized Aggregate Snapshot
ProtocolStateContractorRedis -> AggregateProcessor: LastAggregateSnapshot
alt if last Finalized Aggregate Snapshot is not found
AggregateProcessor -> ProtocolStateContractorRedis : Get required base snapshots and calculate aggregate from scratch
else last Finalized Aggregate Snapshot is found
AggregateProcessor -> ProtocolStateContractorRedis : Get required (remaining) base snapshots
LastAggregateSnapshot -> LastAggregateSnapshot: Calculate aggregate from last Finalized Aggregate Snapshot by adding all missing base snapshots
end

AggregateProcessor -> ProtocolStateContractorRedis: Fetch tail snapshots for corresponding added snapshots and mark for removal from Aggregate Snapshot

LastAggregateSnapshot -> LastAggregateSnapshot: Remove marked tail snapshots from Aggregate Snapshot

LastAggregateSnapshot -> AggregateProcessor: Finalized Aggregate Snapshot
end
@enduml
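
The sequence above describes incremental aggregation: build from scratch only on a project's first epoch or when no finalized aggregate exists; otherwise start from the last finalized aggregate, add the base snapshots that arrived since, and remove the tail snapshots that slide out of the window. A rough Python sketch of that control flow follows; every helper name is a hypothetical stand-in for the protocol state contract / Redis accessors, not an actual pooler API.

# Hypothetical sketch of the flow shown in flow.puml. Helper functions such as
# get_project_first_epoch, get_last_finalized_aggregate, fetch_base_snapshots,
# fetch_missing_base_snapshots and fetch_tail_snapshots are illustrative only.
async def compute_aggregate(project_id: str, epoch_id: int):
    first_epoch = await get_project_first_epoch(project_id)

    if epoch_id == first_epoch:
        # First aggregation for this project: build entirely from base snapshots.
        base = await fetch_base_snapshots(project_id, epoch_id)
        return aggregate_from_scratch(base)

    last_aggregate = await get_last_finalized_aggregate(project_id)
    if last_aggregate is None:
        # No finalized aggregate to reuse: fall back to a full recompute.
        base = await fetch_base_snapshots(project_id, epoch_id)
        return aggregate_from_scratch(base)

    # Incremental path: add only the base snapshots missing since the last
    # finalized aggregate, then drop the tail snapshots that fall out of the
    # aggregation window.
    missing = await fetch_missing_base_snapshots(project_id, last_aggregate, epoch_id)
    aggregate = add_snapshots(last_aggregate, missing)

    tails = await fetch_tail_snapshots(project_id, missing)
    return remove_snapshots(aggregate, tails)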
