Docker Integration #22
Merged · 7 commits · Jun 5, 2020
Binary file removed: .DS_Store (binary content not shown)
1 change: 1 addition & 0 deletions .gitignore
@@ -9,3 +9,4 @@ env/*
build/*
cyberhead.egg-info/*
dist/*
+.DS_Store
13 changes: 13 additions & 0 deletions Dockerfile
@@ -0,0 +1,13 @@
FROM ubuntu:latest

WORKDIR /app
ADD ./cyberhead /app

RUN apt-get update
RUN apt-get install -y python3 python3-pip telnet curl make g++ libssl-dev git
RUN curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add -
RUN echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list
RUN apt-get update
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get install -y yarn
RUN pip3 install -r ./requirements.txt
2 changes: 1 addition & 1 deletion cyberhead/database.py
@@ -3,7 +3,7 @@
import os


engine = create_engine(f"mysql+pymysql://root:root@localhost:3306/cyberhead").connect()
engine = create_engine(f"mysql+pymysql://{os.getenv('CH_DB_USER')}:{os.getenv('CH_DB_PASSWORD')}@{os.getenv('CH_DB_HOST')}:3306/{os.getenv('CH_DB_NAME')}").connect()

db = peewee.MySQLDatabase(os.getenv('CH_DB_NAME'),
    host=os.getenv('CH_DB_HOST'),
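Note on the database.py change: the connection URL is now assembled entirely from environment variables, so any variable that is unset silently becomes the literal string None inside the URL. A minimal guard is sketched below, reusing the same CH_DB_* names from this PR; the require helper itself is hypothetical:

import os

def require(name: str) -> str:
    # Fail fast with a clear error instead of building a URL containing 'None'.
    value = os.getenv(name)
    if value is None:
        raise RuntimeError(f'Missing required environment variable: {name}')
    return value

db_url = (
    f"mysql+pymysql://{require('CH_DB_USER')}:{require('CH_DB_PASSWORD')}"
    f"@{require('CH_DB_HOST')}:3306/{require('CH_DB_NAME')}"
)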
2 changes: 1 addition & 1 deletion cyberhead/main.py
@@ -143,4 +143,4 @@ def heatmap():
"""
if __name__ == '__main__':
    run_loader()
-    app.run(debug=True)
+    app.run(debug=True, host='0.0.0.0')
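Note on the main.py change: binding to 0.0.0.0 is what makes Flask reachable through the container's published ports; the default 127.0.0.1 bind only accepts connections from inside the container itself. If the same entry point should also run outside Docker, the bind address and debug flag could be made configurable too. A sketch under that assumption (CH_HOST and CH_DEBUG are hypothetical variables, not part of this PR):

import os
from flask import Flask

app = Flask(__name__)

if __name__ == '__main__':
    # Defaults match the containerized setup; override for local runs.
    app.run(
        debug=os.getenv('CH_DEBUG', '1') == '1',
        host=os.getenv('CH_HOST', '0.0.0.0'),
    )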
12 changes: 12 additions & 0 deletions cyberhead/requirements.txt
@@ -1,4 +1,5 @@
amqp==2.5.2
asn1crypto==0.24.0
Backtesting==0.1.7
billiard==3.6.3.0
bokeh==2.0.2
@@ -8,15 +9,21 @@ cffi==1.14.0
chardet==3.0.4
click==7.1.2
cryptography==2.9.2
cycler==0.10.0
dataclasses==0.6
Flask==1.1.2
Flask-Cors==3.0.8
idna==2.8
importlib-metadata==1.6.0
itsdangerous==1.1.0
Jinja2==2.11.2
keyring==10.6.0
keyrings.alt==3.0
kiwisolver==1.2.0
kombu==4.6.8
lxml==4.5.0
MarkupSafe==1.1.1
matplotlib==3.2.1
multitasking==0.0.9
numpy==1.18.1
packaging==20.3
@@ -25,14 +32,19 @@ pandas-datareader==0.8.1
peewee==3.13.3
Pillow==7.1.2
pycparser==2.20
pycrypto==2.6.1
pydantic==1.4
PyMySQL==0.9.3
pyOpenSSL==19.1.0
pyparsing==2.4.7
python-dateutil==2.8.1
pytz==2020.1
pyxdg==0.25
PyYAML==5.3.1
requests==2.22.0
scipy==1.4.1
seaborn==0.10.1
SecretStorage==2.3.1
six==1.14.0
SQLAlchemy==1.3.17
starlette==0.12.9
2 changes: 1 addition & 1 deletion cyberhead/tasker.py
@@ -14,7 +14,7 @@
Celery Initialization
If your broker instance is located outside localhost, replace it below, or set the proper URL if you're looking to use Redis.
'''
app = Celery('tasker', broker="amqp://localhost//")
app = Celery('tasker', broker="amqp://rabbit//")


'''
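Note on the tasker.py change: the hostname rabbit is the alias created by the links entry in docker-compose.yml below, so this module now assumes it always runs inside the Compose network. An environment override with the Compose alias as the default would keep it runnable both inside and outside Docker; a sketch, where CH_BROKER_URL is a hypothetical variable name:

import os
from celery import Celery

# Falls back to the Compose link alias when no override is provided.
app = Celery('tasker', broker=os.getenv('CH_BROKER_URL', 'amqp://rabbit//'))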
42 changes: 42 additions & 0 deletions docker-compose.yml
@@ -0,0 +1,42 @@
version: '3'

services:
  cyberhead-mysql:
    container_name: cyberhead-mysql
    image: mysql
    environment:
      MYSQL_ROOT_PASSWORD: root
      MYSQL_DATABASE: cyberhead
    ports:
      - "3306:3306"
    restart: unless-stopped
  cyberhead-rmq:
    container_name: cyberhead-rmq
    image: 'rabbitmq:3.6-management-alpine'
    ports:
      - '5672:5672'
      - '15672:15672'
    restart: unless-stopped
  cyberhead:
    container_name: cyberhead
    environment:
      CH_DB_NAME: cyberhead
      CH_DB_HOST: database
      CH_DB_USER: root
      CH_DB_PASSWORD: root
    build:
      context: ./
      dockerfile: ./Dockerfile
    command: sh -c "tail -f /dev/null"
    volumes:
      - .:/app
    depends_on:
      - cyberhead-mysql
      - cyberhead-rmq
    ports:
      - 5000:5000
      - 3000:3000
    restart: always
    links:
      - "cyberhead-rmq:rabbit"
      - "cyberhead-mysql:database"
34 changes: 0 additions & 34 deletions docker/Dockerfile

This file was deleted.

43 changes: 0 additions & 43 deletions docker/docker-compose.yml

This file was deleted.

2 changes: 2 additions & 0 deletions init.sh
@@ -0,0 +1,2 @@
python3 ./cyberhead/database.py
python3 ./startup.py
45 changes: 45 additions & 0 deletions startup.py
@@ -0,0 +1,45 @@
from cyberhead.database import DataSet
from cyberhead.modules.datasets import yahoo
from sqlalchemy import create_engine
import numpy as np
import pandas
import os

engine = create_engine(f"mysql+pymysql://{os.getenv('CH_DB_USER')}:{os.getenv('CH_DB_PASSWORD')}@{os.getenv('CH_DB_HOST')}:3306/{os.getenv('CH_DB_NAME')}").connect()

def allTimeFetch(ticker: str, period: str, interval: str, dataset_id: int):
    data = yahoo.download_historical(ticker, period, interval)
    data.to_csv('cyberhead/tmp/{}.csv'.format(ticker))
    read_export = pandas.read_csv('cyberhead/tmp/{}.csv'.format(ticker))
    read_export.drop('Adj Close', axis=1, inplace=True)
    read_export.columns = ['datetime', 'open_price', 'high_price', 'low_price', 'closing_price', 'volume']
    read_export['dataset_id'] = dataset_id
    read_export.to_sql('history', con=engine, if_exists='append', index=False)
    return ticker


timeseries_ohlc = [
    {
        "identifier": "Google 1D",
        "reference_symbol": "GOOG1D",
        "ticker": "GOOG",
        "source": "Yahoo",
        "frecuency": "1D"
    },
    {
        "identifier": "Apple 1D",
        "reference_symbol": "AAPL1D",
        "ticker": "AAPL",
        "source": "Yahoo",
        "frecuency": "1D"
    }
]

for data in timeseries_ohlc:
    data_set = DataSet.create(identifier=data["identifier"],
        reference_symbol=data["reference_symbol"],
        symbol=data["ticker"],
        source=data["source"],
        frecuency=data["frecuency"])
    allTimeFetch(data["ticker"], "max", "1d", data_set.id)
    print(f'{data["reference_symbol"]} loaded.')
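Note on startup.py: the seed loop creates the two DataSet rows unconditionally, so running init.sh a second time against a persistent database would duplicate them and re-append the full price history. peewee's get_or_create would make the seeding idempotent on the DataSet side; a sketch, assuming reference_symbol uniquely identifies a dataset:

for data in timeseries_ohlc:
    # Only fetch and load history the first time this dataset is seen.
    data_set, created = DataSet.get_or_create(
        reference_symbol=data["reference_symbol"],
        defaults={
            "identifier": data["identifier"],
            "symbol": data["ticker"],
            "source": data["source"],
            "frecuency": data["frecuency"],
        },
    )
    if created:
        allTimeFetch(data["ticker"], "max", "1d", data_set.id)
        print(f'{data["reference_symbol"]} loaded.')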