diff --git a/Dockerfile b/Dockerfile
index f19ca2b1..aec9282e 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM python:3.9.1-alpine
+FROM python:3.10.5-alpine
 
 ENV DEBUG="True" \
     DATA_FOLDER="/config" \
diff --git a/README.md b/README.md
index dd25a3ec..de4e654b 100644
--- a/README.md
+++ b/README.md
@@ -17,7 +17,7 @@ ecosystem into InfluxDB using Grafana for a frontend
 Requirements:
 * [Python 3.6.7+](https://www.python.org/downloads/release/python-367/)
 * [Python3-pip](https://pip.pypa.io/en/stable/installing/)
-* [InfluxDB 1.8.x](https://www.influxdata.com/)
+* [InfluxDB 1.8.x or 2.0.x](https://www.influxdata.com/)
 * [Grafana](https://grafana.com/)

diff --git a/Varken.py b/Varken.py
index 3641cbc4..493a4bf8 100644
--- a/Varken.py
+++ b/Varken.py
@@ -1,19 +1,21 @@
 import platform
 import schedule
+import distro
 
 from time import sleep
 from queue import Queue
 from sys import version
 from threading import Thread
 from os import environ as env
 from os import access, R_OK, getenv
-from distro import linux_distribution
 from os.path import isdir, abspath, dirname, join
 from argparse import ArgumentParser, RawTextHelpFormatter
 from logging import getLogger, StreamHandler, Formatter, DEBUG
+
 # Needed to check version of python
 from varken import structures  # noqa
 from varken.ombi import OmbiAPI
+from varken.overseerr import OverseerrAPI
 from varken.unifi import UniFiAPI
 from varken import VERSION, BRANCH, BUILD_DATE
 from varken.sonarr import SonarrAPI
@@ -21,13 +23,14 @@ from varken.lidarr import LidarrAPI
 from varken.iniparser import INIParser
 from varken.dbmanager import DBManager
+from varken.influxdb2manager import InfluxDB2Manager
 from varken.helpers import GeoIPHandler
 from varken.tautulli import TautulliAPI
 from varken.sickchill import SickChillAPI
 from varken.varkenlogger import VarkenLogger
 
 
-PLATFORM_LINUX_DISTRO = ' '.join(x for x in linux_distribution() if x)
+PLATFORM_LINUX_DISTRO = ' '.join(x for x in [distro.id(), distro.version(), distro.name()] if x)
 
 
 def thread(job, **kwargs):
@@ -90,7 +93,15 @@ def thread(job, **kwargs):
     vl.logger.info("Varken v%s-%s %s", VERSION, BRANCH, BUILD_DATE)
 
     CONFIG = INIParser(DATA_FOLDER)
-    DBMANAGER = DBManager(CONFIG.influx_server)
+
+    if CONFIG.influx2_enabled:
+        # Use INFLUX version 2
+        vl.logger.info('Using INFLUXDBv2')
+        DBMANAGER = InfluxDB2Manager(CONFIG.influx_server)
+    else:
+        vl.logger.info('Using INFLUXDB')
+        DBMANAGER = DBManager(CONFIG.influx_server)
+
     QUEUE = Queue()
 
     if CONFIG.sonarr_enabled:
@@ -156,6 +167,18 @@ def thread(job, **kwargs):
                 at_time = schedule.every(server.issue_status_run_seconds).seconds
                 at_time.do(thread, OMBI.get_issue_counts).tag("ombi-{}-get_issue_counts".format(server.id))
 
+    if CONFIG.overseerr_enabled:
+        for server in CONFIG.overseerr_servers:
+            OVERSEER = OverseerrAPI(server, DBMANAGER)
+            if server.get_request_total_counts:
+                at_time = schedule.every(server.request_total_run_seconds).seconds
+                at_time.do(thread, OVERSEER.get_request_counts).tag("overseerr-{}-get_request_counts"
+                                                                    .format(server.id))
+            if server.num_latest_requests_to_fetch > 0:
+                at_time = schedule.every(server.num_latest_requests_seconds).seconds
+                at_time.do(thread, OVERSEER.get_latest_requests).tag("overseerr-{}-get_latest_requests"
+                                                                     .format(server.id))
+
     if CONFIG.sickchill_enabled:
         for server in CONFIG.sickchill_servers:
             SICKCHILL = SickChillAPI(server, DBMANAGER)
@@ -171,7 +194,8 @@ def thread(job, **kwargs):
 
     # Run all on startup
     SERVICES_ENABLED = [CONFIG.ombi_enabled, CONFIG.radarr_enabled, CONFIG.tautulli_enabled, CONFIG.unifi_enabled,
-                        CONFIG.sonarr_enabled, CONFIG.sickchill_enabled, CONFIG.lidarr_enabled]
+                        CONFIG.sonarr_enabled, CONFIG.sickchill_enabled, CONFIG.lidarr_enabled,
+                        CONFIG.overseerr_enabled]
     if not [enabled for enabled in SERVICES_ENABLED if enabled]:
         vl.logger.error("All services disabled. Exiting")
         exit(1)
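Note on the scheduler hunks above: each new Overseerr job follows the same register-and-tag pattern as the existing services. A minimal standalone sketch of that pattern (hypothetical job body; the 30-second interval mirrors the example ini, schedule==1.1.0 as pinned in this diff):

```python
# Register a tagged job and run the scheduler loop, as Varken.py does.
import time
import schedule

def get_request_counts():
    print("polling Overseerr request counts")

# The tag format mirrors "overseerr-{id}-get_request_counts" from Varken.py,
# so jobs for a given server can later be found or cleared by tag.
schedule.every(30).seconds.do(get_request_counts).tag("overseerr-1-get_request_counts")

while True:
    schedule.run_pending()  # executes any jobs whose interval has elapsed
    time.sleep(1)
```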
Exiting") exit(1) diff --git a/data/varken.example.ini b/data/varken.example.ini index fa072cff..93d9ec22 100644 --- a/data/varken.example.ini +++ b/data/varken.example.ini @@ -3,10 +3,12 @@ sonarr_server_ids = 1,2 radarr_server_ids = 1,2 lidarr_server_ids = false tautulli_server_ids = 1 -ombi_server_ids = 1 +ombi_server_ids = false +overseerr_server_ids = 1 sickchill_server_ids = false unifi_server_ids = false maxmind_license_key = xxxxxxxxxxxxxxxx +influx2_enabled = false [influxdb] url = influxdb.domain.tld @@ -16,6 +18,15 @@ verify_ssl = false username = root password = root +[influx2] +url = influxdb2.domain.tld +org = ORG +token = TOKEN +timeout = 10000 +ssl = false +verify_ssl = false +bucket = varken + [tautulli-1] url = tautulli.domain.tld:8181 fallback_ip = 1.1.1.1 @@ -95,6 +106,17 @@ request_total_run_seconds = 300 get_issue_status_counts = true issue_status_run_seconds = 300 +[overseerr-1] +url = overseerr.domain.tld +apikey = xxxxxxxxxxxxxxxx +ssl = false +verify_ssl = false +get_request_total_counts = true +request_total_run_seconds = 30 +get_latest_requests = true +num_latest_requests_to_fetch = 10 +num_latest_requests_seconds = 30 + [sickchill-1] url = sickchill.domain.tld:8081 apikey = xxxxxxxxxxxxxxxx diff --git a/docker-compose.yml b/docker-compose.yml index a3cb252b..04bfb965 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -6,7 +6,7 @@ services: influxdb: hostname: influxdb container_name: influxdb - image: influxdb + image: influxdb:1.8 networks: - internal volumes: @@ -22,91 +22,6 @@ services: - /path/to/docker-varken/config-folder:/config environment: - TZ=America/Chicago - - VRKN_GLOBAL_SONARR_SERVER_IDS=1,2 - - VRKN_GLOBAL_RADARR_SERVER_IDS=1,2 - - VRKN_GLOBAL_LIDARR_SERVER_IDS=false - - VRKN_GLOBAL_TAUTULLI_SERVER_IDS=1 - - VRKN_GLOBAL_OMBI_SERVER_IDS=1 - - VRKN_GLOBAL_SICKCHILL_SERVER_IDS=false - - VRKN_GLOBAL_UNIFI_SERVER_IDS=false - - VRKN_GLOBAL_MAXMIND_LICENSE_KEY=xxxxxxxxxxxxxxxx - - VRKN_INFLUXDB_URL=influxdb.domain.tld - - VRKN_INFLUXDB_PORT=8086 - - VRKN_INFLUXDB_SSL=false - - VRKN_INFLUXDB_VERIFY_SSL=false - - VRKN_INFLUXDB_USERNAME=root - - VRKN_INFLUXDB_PASSWORD=root - - VRKN_TAUTULLI_1_URL=tautulli.domain.tld:8181 - - VRKN_TAUTULLI_1_FALLBACK_IP=1.1.1.1 - - VRKN_TAUTULLI_1_APIKEY=xxxxxxxxxxxxxxxx - - VRKN_TAUTULLI_1_SSL=false - - VRKN_TAUTULLI_1_VERIFY_SSL=false - - VRKN_TAUTULLI_1_GET_ACTIVITY=true - - VRKN_TAUTULLI_1_GET_ACTIVITY_RUN_SECONDS=30 - - VRKN_TAUTULLI_1_GET_STATS=true - - VRKN_TAUTULLI_1_GET_STATS_RUN_SECONDS=3600 - - VRKN_SONARR_1_URL=sonarr1.domain.tld:8989 - - VRKN_SONARR_1_APIKEY=xxxxxxxxxxxxxxxx - - VRKN_SONARR_1_SSL=false - - VRKN_SONARR_1_VERIFY_SSL=false - - VRKN_SONARR_1_MISSING_DAYS=7 - - VRKN_SONARR_1_MISSING_DAYS_RUN_SECONDS=300 - - VRKN_SONARR_1_FUTURE_DAYS=1 - - VRKN_SONARR_1_FUTURE_DAYS_RUN_SECONDS=300 - - VRKN_SONARR_1_QUEUE=true - - VRKN_SONARR_1_QUEUE_RUN_SECONDS=300 - - VRKN_SONARR_2_URL=sonarr2.domain.tld:8989 - - VRKN_SONARR_2_APIKEY=yyyyyyyyyyyyyyyy - - VRKN_SONARR_2_SSL=false - - VRKN_SONARR_2_VERIFY_SSL=false - - VRKN_SONARR_2_MISSING_DAYS=7 - - VRKN_SONARR_2_MISSING_DAYS_RUN_SECONDS=300 - - VRKN_SONARR_2_FUTURE_DAYS=1 - - VRKN_SONARR_2_FUTURE_DAYS_RUN_SECONDS=300 - - VRKN_SONARR_2_QUEUE=true - - VRKN_SONARR_2_QUEUE_RUN_SECONDS=300 - - VRKN_RADARR_1_URL=radarr1.domain.tld - - VRKN_RADARR_1_APIKEY=xxxxxxxxxxxxxxxx - - VRKN_RADARR_1_SSL=false - - VRKN_RADARR_1_VERIFY_SSL=false - - VRKN_RADARR_1_QUEUE=true - - VRKN_RADARR_1_QUEUE_RUN_SECONDS=300 - - VRKN_RADARR_1_GET_MISSING=true - - 
diff --git a/requirements.txt b/requirements.txt
index 38e13120..22449b92 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,9 +2,10 @@
 # Potential requirements.
 # pip3 install -r requirements.txt
 #---------------------------------------------------------
-requests==2.21
+requests==2.28.1
 geoip2==2.9.0
 influxdb==5.2.0
-schedule==0.6.0
+schedule==1.1.0
 distro==1.4.0
-urllib3==1.24.2
\ No newline at end of file
+urllib3==1.26.10
+influxdb-client==1.14.0
\ No newline at end of file
diff --git a/utilities/historical_tautulli_import.py b/utilities/historical_tautulli_import.py
index 62bd0f89..a5f7a14a 100644
--- a/utilities/historical_tautulli_import.py
+++ b/utilities/historical_tautulli_import.py
@@ -41,7 +41,7 @@
 DBMANAGER = DBManager(CONFIG.influx_server)
 
 if CONFIG.tautulli_enabled:
-    GEOIPHANDLER = GeoIPHandler(DATA_FOLDER)
+    GEOIPHANDLER = GeoIPHandler(DATA_FOLDER, CONFIG.tautulli_servers[0].maxmind_license_key)
     for server in CONFIG.tautulli_servers:
         TAUTULLI = TautulliAPI(server, DBMANAGER, GEOIPHANDLER)
         TAUTULLI.get_historical(days=opts.days)
diff --git a/varken.xml b/varken.xml
index b846c575..ab09d2d5 100644
--- a/varken.xml
+++ b/varken.xml
@@ -51,5 +51,6 @@
   99
   100
+  False
   /mnt/user/appdata/varken
\ No newline at end of file
diff --git a/varken/influxdb2manager.py b/varken/influxdb2manager.py
new file mode 100644
index 00000000..62229ec8
--- /dev/null
+++ b/varken/influxdb2manager.py
@@ -0,0 +1,48 @@
+from sys import exit
+from logging import getLogger
+import influxdb_client
+from influxdb_client import InfluxDBClient
+from influxdb_client.client.write_api import SYNCHRONOUS
+
+
+class InfluxDB2Manager(object):
+    def __init__(self, server):
+        self.server = server
+        self.logger = getLogger()
+        if self.server.url == "influxdb2.domain.tld":
+            self.logger.critical("You have not configured your varken.ini. Please read Wiki page for configuration")
+            exit()
+
+        self.influx = InfluxDBClient(url=self.server.url, token=self.server.token, org=self.server.org,
+                                     timeout=self.server.timeout, verify_ssl=self.server.verify_ssl,
+                                     ssl_ca_cert=self.server.ssl)
+        self.influx_write_api = self.influx.write_api(write_options=SYNCHRONOUS)
+
+        # Create the bucket if needed
+
+        bucket_api = self.influx.buckets_api()
+
+        try:
+            bucket = bucket_api.find_bucket_by_name(self.server.bucket)
+
+            if bucket is None:
+                self.logger.info('Creating bucket %s', self.server.bucket)
+
+                org_api = influxdb_client.service.organizations_service.OrganizationsService(self.influx.api_client)
+                orgs = org_api.get_orgs()
+                for org in orgs.orgs:
+                    if org.name == self.server.org:
+                        my_org = org
+
+                self.influx.buckets_api().create_bucket(bucket_name=self.server.bucket, org_id=my_org.id)
+        except Exception as e:
+            self.logger.error('Failed creating new InfluxDB bucket! Error: %s', e)
+
+    def write_points(self, data):
+        self.logger.info('Writing Data to InfluxDBv2 %s', data)
+
+        try:
+            self.influx_write_api.write(bucket=self.server.bucket, record=data)
+        except Exception as e:
+            self.logger.exception('Error writing data to influxdb2. Dropping this set of data. '
+                                  'Check your database! Error: %s', e)
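The new InfluxDB2Manager wraps a small slice of the influxdb-client API. A self-contained sketch of the same write path, with placeholder URL/token/org/bucket taken from varken.example.ini (the port and timestamp are assumptions for illustration):

```python
# Minimal write against InfluxDB 2.x, mirroring InfluxDB2Manager (influxdb-client==1.14.0).
from influxdb_client import InfluxDBClient
from influxdb_client.client.write_api import SYNCHRONOUS

client = InfluxDBClient(url="http://influxdb2.domain.tld:8086", token="TOKEN", org="ORG")
write_api = client.write_api(write_options=SYNCHRONOUS)

# Varken passes its existing point dicts straight through as `record`;
# the client accepts the same measurement/tags/time/fields shape.
point = {
    "measurement": "Overseerr",
    "tags": {"type": "Request_Counts"},
    "time": "2022-01-01T00:00:00Z",
    "fields": {"pending": 1, "approved": 2},
}
write_api.write(bucket="varken", record=point)
```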
diff --git a/varken/iniparser.py b/varken/iniparser.py
index e241f31b..fc84d876 100644
--- a/varken/iniparser.py
+++ b/varken/iniparser.py
@@ -9,7 +9,7 @@
 from varken.varkenlogger import BlacklistFilter
 from varken.structures import SickChillServer, UniFiServer
 from varken.helpers import clean_sid_check, rfc1918_ip_check, boolcheck
-from varken.structures import SonarrServer, RadarrServer, OmbiServer, TautulliServer, InfluxServer
+from varken.structures import SonarrServer, RadarrServer, OmbiServer, OverseerrServer, TautulliServer, InfluxServer, Influx2Server
 
 
 class INIParser(object):
@@ -17,7 +17,7 @@ def __init__(self, data_folder):
         self.config = None
         self.data_folder = data_folder
         self.filtered_strings = None
-        self.services = ['sonarr', 'radarr', 'lidarr', 'ombi', 'tautulli', 'sickchill', 'unifi']
+        self.services = ['sonarr', 'radarr', 'lidarr', 'ombi', 'overseerr', 'tautulli', 'sickchill', 'unifi']
 
         self.logger = getLogger()
         self.influx_server = InfluxServer()
@@ -107,6 +107,7 @@ def url_check(self, url=None, include_port=True, section=None):
         valid = match(regex, url_check) is not None
         if not valid:
+            return url_check
             if inc_port:
                 self.logger.error('%s is invalid in module [%s]! URL must host/IP and '
                                   'port if not 80 or 443. ie. localhost:8080',
@@ -144,23 +145,47 @@ def parse_opts(self, read_file=False):
         if read_file:
             self.config = self.read_file('varken.ini')
             self.config_blacklist()
+
         # Parse InfluxDB options
-        try:
-            url = self.url_check(env.get('VRKN_INFLUXDB_URL', self.config.get('influxdb', 'url')),
-                                 include_port=False, section='influxdb')
-            port = int(env.get('VRKN_INFLUXDB_PORT', self.config.getint('influxdb', 'port')))
-            ssl = boolcheck(env.get('VRKN_INFLUXDB_SSL', self.config.get('influxdb', 'ssl')))
-            verify_ssl = boolcheck(env.get('VRKN_INFLUXDB_VERIFY_SSL', self.config.get('influxdb', 'verify_ssl')))
-
-            username = env.get('VRKN_INFLUXDB_USERNAME', self.config.get('influxdb', 'username'))
-            password = env.get('VRKN_INFLUXDB_PASSWORD', self.config.get('influxdb', 'password'))
-        except NoOptionError as e:
-            self.logger.error('Missing key in %s. Error: %s', "influxdb", e)
-            self.rectify_ini()
-            return
+        self.influx2_enabled = env.get('VRKN_GLOBAL_INFLUXDB2_ENABLED',
+                                       self.config.getboolean('global', 'influx2_enabled'))
 
-        self.influx_server = InfluxServer(url=url, port=port, username=username, password=password, ssl=ssl,
-                                          verify_ssl=verify_ssl)
+        if self.influx2_enabled:
+            # Use INFLUX version 2
+            try:
+                url = self.url_check(env.get('VRKN_INFLUXDB2_URL', self.config.get('influx2', 'url')),
+                                     section='influx2', include_port=False)
+                ssl = boolcheck(env.get('VRKN_INFLUXDB2_SSL', self.config.get('influx2', 'ssl')))
+                verify_ssl = boolcheck(env.get('VRKN_INFLUXDB2_VERIFY_SSL', self.config.get('influx2', 'verify_ssl')))
+
+                org = env.get('VRKN_INFLUXDB2_ORG', self.config.get('influx2', 'org'))
+                bucket = env.get('VRKN_INFLUXDB2_BUCKET', self.config.get('influx2', 'bucket'))
+                token = env.get('VRKN_INFLUXDB2_TOKEN', self.config.get('influx2', 'token'))
+                timeout = env.get('VRKN_INFLUXDB2_TIMEOUT', self.config.get('influx2', 'timeout'))
+            except NoOptionError as e:
+                self.logger.error('Missing key in %s. Error: %s', "influx2", e)
+                self.rectify_ini()
+                return
+
+            self.influx_server = Influx2Server(url=url, token=token, org=org, timeout=timeout, ssl=ssl,
+                                               verify_ssl=verify_ssl, bucket=bucket)
+        else:
+            try:
+                url = self.url_check(env.get('VRKN_INFLUXDB_URL', self.config.get('influxdb', 'url')),
+                                     include_port=False, section='influxdb')
+                port = int(env.get('VRKN_INFLUXDB_PORT', self.config.getint('influxdb', 'port')))
+                ssl = boolcheck(env.get('VRKN_INFLUXDB_SSL', self.config.get('influxdb', 'ssl')))
+                verify_ssl = boolcheck(env.get('VRKN_INFLUXDB_VERIFY_SSL', self.config.get('influxdb', 'verify_ssl')))
+
+                username = env.get('VRKN_INFLUXDB_USERNAME', self.config.get('influxdb', 'username'))
+                password = env.get('VRKN_INFLUXDB_PASSWORD', self.config.get('influxdb', 'password'))
+            except NoOptionError as e:
+                self.logger.error('Missing key in %s. Error: %s', "influxdb", e)
+                self.rectify_ini()
+                return
+
+            self.influx_server = InfluxServer(url=url, port=port, username=username, password=password, ssl=ssl,
+                                              verify_ssl=verify_ssl)
 
         # Check for all enabled services
         for service in self.services:
@@ -293,6 +318,27 @@ def parse_opts(self, read_file=False):
                                          issue_status_counts=issue_status_counts,
                                          issue_status_run_seconds=issue_status_run_seconds)
 
+                    if service == 'overseerr':
+                        get_request_total_counts = boolcheck(env.get(
+                            f'VRKN_{envsection}_GET_REQUEST_TOTAL_COUNTS',
+                            self.config.get(section, 'get_request_total_counts')))
+                        request_total_run_seconds = int(env.get(
+                            f'VRKN_{envsection}_REQUEST_TOTAL_RUN_SECONDS',
+                            self.config.getint(section, 'request_total_run_seconds')))
+                        num_latest_requests_to_fetch = int(env.get(
+                            f'VRKN_{envsection}_GET_LATEST_REQUESTS_TO_FETCH',
+                            self.config.getint(section, 'num_latest_requests_to_fetch')))
+                        num_latest_requests_seconds = int(env.get(
+                            f'VRKN_{envsection}_NUM_LATEST_REQUESTS_SECONDS',
+                            self.config.getint(section, 'num_latest_requests_seconds')))
+
+                        server = OverseerrServer(id=server_id, url=scheme + url, api_key=apikey,
+                                                 verify_ssl=verify_ssl,
+                                                 get_request_total_counts=get_request_total_counts,
+                                                 request_total_run_seconds=request_total_run_seconds,
+                                                 num_latest_requests_to_fetch=num_latest_requests_to_fetch,
+                                                 num_latest_requests_seconds=num_latest_requests_seconds)
+
                     if service == 'sickchill':
                         get_missing = boolcheck(env.get(f'VRKN_{envsection}_GET_MISSING',
                                                         self.config.get(section, 'get_missing')))
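The env-then-ini fallback used throughout parse_opts() resolves like this (illustrative snippet, not Varken code). One caveat worth keeping in mind: os.environ values are always strings, so a set-but-"false" env var stays truthy until normalized, which is why the other options in this file pass through boolcheck():

```python
# How env.get(NAME, config_value) resolves: env var wins when present.
from os import environ as env
from configparser import ConfigParser

config = ConfigParser()
config.read_string("[global]\ninflux2_enabled = false\n")

# When the env var is unset, getboolean() supplies a real bool;
# when it is set, the raw string comes back and needs normalizing.
influx2_enabled = env.get('VRKN_GLOBAL_INFLUXDB2_ENABLED',
                          config.getboolean('global', 'influx2_enabled'))
print(influx2_enabled, type(influx2_enabled))  # False <class 'bool'> with the env var unset
```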
Error: %s', "influx2", e) + self.rectify_ini() + return + + self.influx_server = Influx2Server(url=url, token=token, org=org, timeout=timeout, ssl=ssl, + verify_ssl=verify_ssl, bucket=bucket) + else: + try: + url = self.url_check(env.get('VRKN_INFLUXDB_URL', self.config.get('influxdb', 'url')), + include_port=False, section='influxdb') + port = int(env.get('VRKN_INFLUXDB_PORT', self.config.getint('influxdb', 'port'))) + ssl = boolcheck(env.get('VRKN_INFLUXDB_SSL', self.config.get('influxdb', 'ssl'))) + verify_ssl = boolcheck(env.get('VRKN_INFLUXDB_VERIFY_SSL', self.config.get('influxdb', 'verify_ssl'))) + + username = env.get('VRKN_INFLUXDB_USERNAME', self.config.get('influxdb', 'username')) + password = env.get('VRKN_INFLUXDB_PASSWORD', self.config.get('influxdb', 'password')) + except NoOptionError as e: + self.logger.error('Missing key in %s. Error: %s', "influxdb", e) + self.rectify_ini() + return + + self.influx_server = InfluxServer(url=url, port=port, username=username, password=password, ssl=ssl, + verify_ssl=verify_ssl) # Check for all enabled services for service in self.services: @@ -293,6 +318,27 @@ def parse_opts(self, read_file=False): issue_status_counts=issue_status_counts, issue_status_run_seconds=issue_status_run_seconds) + if service == 'overseerr': + get_request_total_counts = boolcheck(env.get( + f'VRKN_{envsection}_GET_REQUEST_TOTAL_COUNTS', + self.config.get(section, 'get_request_total_counts'))) + request_total_run_seconds = int(env.get( + f'VRKN_{envsection}_REQUEST_TOTAL_RUN_SECONDS', + self.config.getint(section, 'request_total_run_seconds'))) + num_latest_requests_to_fetch = int(env.get( + f'VRKN_{envsection}_GET_LATEST_REQUESTS_TO_FETCH', + self.config.getint(section, 'num_latest_requests_to_fetch'))) + num_latest_requests_seconds = int(env.get( + f'VRKN_{envsection}_NUM_LATEST_REQUESTS_SECONDS', + self.config.getint(section, 'num_latest_requests_seconds'))) + + server = OverseerrServer(id=server_id, url=scheme + url, api_key=apikey, + verify_ssl=verify_ssl, + get_request_total_counts=get_request_total_counts, + request_total_run_seconds=request_total_run_seconds, + num_latest_requests_to_fetch=num_latest_requests_to_fetch, + num_latest_requests_seconds=num_latest_requests_seconds) + if service == 'sickchill': get_missing = boolcheck(env.get(f'VRKN_{envsection}_GET_MISSING', self.config.get(section, 'get_missing'))) diff --git a/varken/overseerr.py b/varken/overseerr.py new file mode 100644 index 00000000..4d4b96b5 --- /dev/null +++ b/varken/overseerr.py @@ -0,0 +1,133 @@ +from logging import getLogger +from requests import Session, Request +from datetime import datetime, timezone + +from varken.helpers import connection_handler, hashit +from varken.structures import OverseerrRequestCounts + + +class OverseerrAPI(object): + def __init__(self, server, dbmanager): + self.dbmanager = dbmanager + self.server = server + # Create session to reduce server web thread load, and globally define pageSize for all requests + self.session = Session() + self.session.headers = {'X-Api-Key': self.server.api_key} + self.logger = getLogger() + + def __repr__(self): + return f"" + + def get_request_counts(self): + now = datetime.now(timezone.utc).astimezone().isoformat() + endpoint = '/api/v1/request/count' + + req = self.session.prepare_request(Request('GET', self.server.url + endpoint)) + get_req = connection_handler(self.session, req, self.server.verify_ssl) + + if not get_req: + return + + requests = OverseerrRequestCounts(**get_req) + influx_payload = [ + { + "measurement": 
"Overseerr", + "tags": { + "type": "Request_Counts" + }, + "time": now, + "fields": { + "pending": requests.pending, + "approved": requests.approved, + "processing": requests.processing, + "available": requests.available, + "total": requests.total, + "movies": requests.movie, + "tv": requests.tv, + "declined": requests.declined + } + } + ] + + if influx_payload: + self.dbmanager.write_points(influx_payload) + else: + self.logger.warning("No data to send to influx for overseerr-request-counts instance, discarding.") + + def get_latest_requests(self): + now = datetime.now(timezone.utc).astimezone().isoformat() + endpoint = '/api/v1/request?take=' + str(self.server.num_latest_requests_to_fetch) + '&filter=all&sort=added' + movie_endpoint = '/api/v1/movie/' + tv_endpoint = '/api/v1/tv/' + + # GET THE LATEST n REQUESTS + req = self.session.prepare_request(Request('GET', self.server.url + endpoint)) + get_latest_req = connection_handler(self.session, req, self.server.verify_ssl) + + # RETURN NOTHING IF NO RESULTS + if not get_latest_req: + self.logger.warning("No data to send to influx for overseerr-latest-requests instance, discarding.") + return + + influx_payload = [] + + # Request Type: Movie = 1, TV Show = 0 + for result in get_latest_req['results']: + if result['type'] == 'tv': + req = self.session.prepare_request(Request('GET', + self.server.url + + tv_endpoint + + str(result['media']['tmdbId']))) + get_tv_req = connection_handler(self.session, req, self.server.verify_ssl) + hash_id = hashit(f'{get_tv_req["id"]}{get_tv_req["name"]}') + + influx_payload.append( + { + "measurement": "Overseerr", + "tags": { + "type": "Requests", + "server": self.server.id, + "request_type": 0, + "status": get_tv_req['mediaInfo']['status'], + "title": get_tv_req['name'], + "requested_user": get_tv_req['mediaInfo']['requests'][0]['requestedBy']['displayName'], + "requested_date": get_tv_req['mediaInfo']['requests'][0]['createdAt'] + }, + "time": now, + "fields": { + "hash": hash_id + } + } + ) + + if result['type'] == 'movie': + req = self.session.prepare_request(Request('GET', + self.server.url + + movie_endpoint + + str(result['media']['tmdbId']))) + get_movie_req = connection_handler(self.session, req, self.server.verify_ssl) + hash_id = hashit(f'{get_movie_req["id"]}{get_movie_req["title"]}') + + influx_payload.append( + { + "measurement": "Overseerr", + "tags": { + "type": "Requests", + "server": self.server.id, + "request_type": 1, + "status": get_movie_req['mediaInfo']['status'], + "title": get_movie_req['title'], + "requested_user": get_movie_req['mediaInfo']['requests'][0]['requestedBy']['displayName'], + "requested_date": get_movie_req['mediaInfo']['requests'][0]['createdAt'] + }, + "time": now, + "fields": { + "hash": hash_id + } + } + ) + + if influx_payload: + self.dbmanager.write_points(influx_payload) + else: + self.logger.warning("No data to send to influx for overseerr-latest-requests instance, discarding.") diff --git a/varken/radarr.py b/varken/radarr.py index 6692ddf7..94719634 100644 --- a/varken/radarr.py +++ b/varken/radarr.py @@ -2,7 +2,7 @@ from requests import Session, Request from datetime import datetime, timezone -from varken.structures import RadarrMovie, Queue +from varken.structures import QueuePages, RadarrMovie, RadarrQueue from varken.helpers import hashit, connection_handler @@ -19,7 +19,7 @@ def __repr__(self): return f"" def get_missing(self): - endpoint = '/api/movie' + endpoint = '/api/v3/movie' now = datetime.now(timezone.utc).astimezone().isoformat() influx_payload = 
diff --git a/varken/sonarr.py b/varken/sonarr.py
index db93ef74..f9b55c64 100644
--- a/varken/sonarr.py
+++ b/varken/sonarr.py
@@ -2,7 +2,7 @@
 from requests import Session, Request
 from datetime import datetime, timezone, date, timedelta
 
-from varken.structures import Queue, SonarrTVShow
+from varken.structures import SonarrEpisode, SonarrTVShow, SonarrQueue, QueuePages
 from varken.helpers import hashit, connection_handler
 
 
@@ -19,16 +19,28 @@ def __init__(self, server, dbmanager):
     def __repr__(self):
         return f"<sonarr-{self.server.id}>"
 
+    def get_episode(self, id):
+        endpoint = '/api/v3/episode'
+        params = {'episodeIds': id}
+
+        req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
+        get = connection_handler(self.session, req, self.server.verify_ssl)
+
+        if not get:
+            return
+
+        return SonarrEpisode(**get[0])
+
     def get_calendar(self, query="Missing"):
-        endpoint = '/api/calendar/'
+        endpoint = '/api/v3/calendar/'
         today = str(date.today())
         last_days = str(date.today() - timedelta(days=self.server.missing_days))
         future = str(date.today() + timedelta(days=self.server.future_days))
         now = datetime.now(timezone.utc).astimezone().isoformat()
         if query == "Missing":
-            params = {'start': last_days, 'end': today}
+            params = {'start': last_days, 'end': today, 'includeSeries': True}
         else:
-            params = {'start': today, 'end': future}
+            params = {'start': today, 'end': future, 'includeSeries': True}
         influx_payload = []
         air_days = []
         missing = []
@@ -42,22 +54,24 @@ def get_calendar(self, query="Missing"):
         tv_shows = []
         for show in get:
             try:
-                tv_shows.append(SonarrTVShow(**show))
+                tv_shows.append(SonarrEpisode(**show))
             except TypeError as e:
-                self.logger.error('TypeError has occurred : %s while creating SonarrTVShow structure for show. Data '
+                self.logger.error('TypeError has occurred : %s while creating SonarrEpisode structure for show. Data '
                                   'attempted is: %s', e, show)
 
-        for show in tv_shows:
-            sxe = f'S{show.seasonNumber:0>2}E{show.episodeNumber:0>2}'
-            if show.hasFile:
+        for episode in tv_shows:
+            tvShow = episode.series
+            sxe = f'S{episode.seasonNumber:0>2}E{episode.episodeNumber:0>2}'
+            if episode.hasFile:
                 downloaded = 1
             else:
                 downloaded = 0
             if query == "Missing":
-                if show.monitored and not downloaded:
-                    missing.append((show.series['title'], downloaded, sxe, show.title, show.airDateUtc, show.id))
+                if episode.monitored and not downloaded:
+                    missing.append((tvShow['title'], downloaded, sxe, episode.title,
+                                    episode.airDateUtc, episode.seriesId))
             else:
-                air_days.append((show.series['title'], downloaded, sxe, show.title, show.airDateUtc, show.id))
+                air_days.append((tvShow['title'], downloaded, sxe, episode.title, episode.airDateUtc, episode.seriesId))
 
         for series_title, dl_status, sxe, episode_title, air_date_utc, sonarr_id in (air_days or missing):
             hash_id = hashit(f'{self.server.id}{series_title}{sxe}')
@@ -81,45 +95,68 @@ def get_calendar(self, query="Missing"):
             }
         )
 
-        self.dbmanager.write_points(influx_payload)
+        if influx_payload:
+            self.dbmanager.write_points(influx_payload)
+        else:
+            self.logger.warning("No data to send to influx for sonarr-calendar instance, discarding.")
 
     def get_queue(self):
         influx_payload = []
-        endpoint = '/api/queue'
+        endpoint = '/api/v3/queue'
         now = datetime.now(timezone.utc).astimezone().isoformat()
+        pageSize = 250
+        params = {'pageSize': pageSize, 'includeSeries': True, 'includeEpisode': True,
+                  'includeUnknownSeriesItems': False}
+        queueResponse = []
         queue = []
 
-        req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
+        req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
         get = connection_handler(self.session, req, self.server.verify_ssl)
-
         if not get:
             return
 
+        response = QueuePages(**get)
+        queueResponse.extend(response.records)
+
+        while response.totalRecords > response.page * response.pageSize:
+            page = response.page + 1
+            params = {'pageSize': pageSize, 'page': page, 'includeSeries': True, 'includeEpisode': True,
+                      'includeUnknownSeriesItems': False}
+            req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
+            get = connection_handler(self.session, req, self.server.verify_ssl)
+            if not get:
+                return
+
+            response = QueuePages(**get)
+            queueResponse.extend(response.records)
+
         download_queue = []
-        for show in get:
+        for queueItem in queueResponse:
             try:
-                download_queue.append(Queue(**show))
+                download_queue.append(SonarrQueue(**queueItem))
             except TypeError as e:
                 self.logger.error('TypeError has occurred : %s while creating Queue structure. Data attempted is: '
-                                  '%s', e, show)
+                                  '%s', e, queueItem)
         if not download_queue:
             return
 
-        for show in download_queue:
+        for queueItem in download_queue:
+            tvShow = SonarrTVShow(**queueItem.series)
             try:
-                sxe = f"S{show.episode['seasonNumber']:0>2}E{show.episode['episodeNumber']:0>2}"
+                episode = SonarrEpisode(**queueItem.episode)
+                sxe = f"S{episode.seasonNumber:0>2}E{episode.episodeNumber:0>2}"
             except TypeError as e:
                 self.logger.error('TypeError has occurred : %s while processing the sonarr queue. \
-                                  Remove invalid queue entry. Data attempted is: %s', e, show)
+                                  Remove invalid queue entry. Data attempted is: %s', e, queueItem)
                 continue
 
-            if show.protocol.upper() == 'USENET':
+            if queueItem.protocol.upper() == 'USENET':
                 protocol_id = 1
             else:
                 protocol_id = 0
 
-            queue.append((show.series['title'], show.episode['title'], show.protocol.upper(),
-                          protocol_id, sxe, show.id, show.quality['quality']['name']))
+            queue.append((tvShow.title, episode.title, queueItem.protocol.upper(),
+                          protocol_id, sxe, queueItem.seriesId, queueItem.quality['quality']['name']))
 
         for series_title, episode_title, protocol, protocol_id, sxe, sonarr_id, quality in queue:
             hash_id = hashit(f'{self.server.id}{series_title}{sxe}')
@@ -143,7 +180,8 @@ def get_queue(self):
                 }
             }
         )
+
         if influx_payload:
             self.dbmanager.write_points(influx_payload)
         else:
-            self.logger.debug("No data to send to influx for sonarr instance, discarding.")
+            self.logger.warning("No data to send to influx for sonarr-queue instance, discarding.")
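A small aside on the SxxExx strings built in get_calendar() and get_queue(): the ':0>2' format spec right-aligns and zero-pads to two digits.

```python
# The episode-label formatting used throughout sonarr.py.
season, episode = 2, 7
sxe = f'S{season:0>2}E{episode:0>2}'
print(sxe)  # S02E07
```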
diff --git a/varken/structures.py b/varken/structures.py
index deb40179..3637b45a 100644
--- a/varken/structures.py
+++ b/varken/structures.py
@@ -20,6 +20,16 @@ class InfluxServer(NamedTuple):
     verify_ssl: bool = False
 
 
+class Influx2Server(NamedTuple):
+    url: str = 'localhost'
+    org: str = 'server'
+    token: str = 'TOKEN'
+    bucket: str = 'varken'
+    timeout: int = 10000
+    ssl: bool = False
+    verify_ssl: bool = False
+
+
 class SonarrServer(NamedTuple):
     api_key: str = None
     future_days: int = 0
@@ -57,6 +67,17 @@ class OmbiServer(NamedTuple):
     verify_ssl: bool = False
 
 
+class OverseerrServer(NamedTuple):
+    api_key: str = None
+    id: int = None
+    url: str = None
+    verify_ssl: bool = False
+    get_request_total_counts: bool = False
+    request_total_run_seconds: int = 30
+    num_latest_requests_to_fetch: int = 10
+    num_latest_requests_seconds: int = 30
+
+
 class TautulliServer(NamedTuple):
     api_key: str = None
     fallback_ip: str = None
@@ -91,22 +112,13 @@ class UniFiServer(NamedTuple):
 
 
 # Shared
-class Queue(NamedTuple):
-    downloadId: str = None
-    episode: dict = None
-    estimatedCompletionTime: str = None
-    id: int = None
-    movie: dict = None
-    protocol: str = None
-    quality: dict = None
-    series: dict = None
-    size: float = None
-    sizeleft: float = None
-    status: str = None
-    statusMessages: list = None
-    timeleft: str = None
-    title: str = None
-    trackedDownloadStatus: str = None
+class QueuePages(NamedTuple):
+    page: int = None
+    pageSize: int = None
+    sortKey: str = None
+    sortDirection: str = None
+    totalRecords: str = None
+    records: list = None
 
 
 # Ombi Structures
@@ -127,8 +139,10 @@ class OmbiTVRequest(NamedTuple):
     childRequests: list = None
     denied: bool = None
     deniedReason: None = None
+    externalProviderId: str = None
     id: int = None
     imdbId: str = None
+    languageProfile: str = None
     markedAsDenied: str = None
     overview: str = None
     posterPath: str = None
@@ -145,72 +159,159 @@ class OmbiTVRequest(NamedTuple):
 
 class OmbiMovieRequest(NamedTuple):
     approved: bool = None
+    approved4K: bool = None
     available: bool = None
+    available4K: bool = None
     background: str = None
     canApprove: bool = None
     denied: bool = None
+    denied4K: None = None
     deniedReason: None = None
+    deniedReason4K: None = None
     digitalRelease: bool = None
     digitalReleaseDate: None = None
+    has4KRequest: bool = None
     id: int = None
     imdbId: str = None
+    is4kRequest: bool = None
     issueId: None = None
     issues: None = None
+    langCode: str = None
+    languageCode: str = None
     markedAsApproved: str = None
+    markedAsApproved4K: str = None
     markedAsAvailable: None = None
+    markedAsAvailable4K: None = None
     markedAsDenied: str = None
+    markedAsDenied4K: str = None
     overview: str = None
     posterPath: str = None
     qualityOverride: int = None
     released: bool = None
     releaseDate: str = None
+    requestedByAlias: str = None
     requestedDate: str = None
+    requestedDate4k: str = None
     requestedUser: dict = None
     requestedUserId: str = None
+    requestStatus: str = None
     requestType: int = None
     rootPathOverride: int = None
     showSubscribe: bool = None
+    source: int = None
     status: str = None
     subscribed: bool = None
     theMovieDbId: int = None
     title: str = None
-    langCode: str = None
-    languageCode: str = None
-    requestedByAlias: str = None
-    requestStatus: str = None
+
+
+# Overseerr
+class OverseerrRequestCounts(NamedTuple):
+    pending: int = None
+    approved: int = None
+    processing: int = None
+    available: int = None
+    total: int = None
+    movie: int = None
+    tv: int = None
+    declined: int = None
 
 
 # Sonarr
 class SonarrTVShow(NamedTuple):
+    added: str = None
+    airTime: str = None
+    alternateTitles: list = None
+    certification: str = None
+    cleanTitle: str = None
+    ended: bool = None
+    firstAired: str = None
+    genres: list = None
+    id: int = None
+    images: list = None
+    imdbId: str = None
+    languageProfileId: int = None
+    monitored: bool = None
+    nextAiring: str = None
+    network: str = None
+    overview: str = None
+    path: str = None
+    previousAiring: str = None
+    qualityProfileId: int = None
+    ratings: dict = None
+    rootFolderPath: str = None
+    runtime: int = None
+    seasonFolder: bool = None
+    seasons: list = None
+    seriesType: str = None
+    sortTitle: str = None
+    statistics: dict = None
+    status: str = None
+    tags: list = None
+    title: str = None
+    titleSlug: str = None
+    tvdbId: int = None
+    tvMazeId: int = None
+    tvRageId: int = None
+    useSceneNumbering: bool = None
+    year: int = None
+
+
+class SonarrEpisode(NamedTuple):
     absoluteEpisodeNumber: int = None
     airDate: str = None
     airDateUtc: str = None
-    episodeFile: dict = None
     episodeFileId: int = None
     episodeNumber: int = None
+    grabbed: bool = None
     hasFile: bool = None
     id: int = None
-    lastSearchTime: str = None
     monitored: bool = None
     overview: str = None
-    sceneAbsoluteEpisodeNumber: int = None
-    sceneEpisodeNumber: int = None
-    sceneSeasonNumber: int = None
     seasonNumber: int = None
-    series: dict = None
     seriesId: int = None
     title: str = None
     unverifiedSceneNumbering: bool = None
+    sceneAbsoluteEpisodeNumber: int = None
+    sceneEpisodeNumber: int = None
+    sceneSeasonNumber: int = None
+    series: SonarrTVShow = None
+    tvdbId: int = None
+
+
+class SonarrQueue(NamedTuple):
+    downloadClient: str = None
+    downloadId: str = None
+    episodeId: int = None
+    id: int = None
+    indexer: str = None
+    language: dict = None
+    protocol: str = None
+    quality: dict = None
+    size: float = None
+    sizeleft: float = None
+    status: str = None
+    statusMessages: list = None
+    title: str = None
+    trackedDownloadState: str = None
+    trackedDownloadStatus: str = None
+    seriesId: int = None
+    errorMessage: str = None
+    outputPath: str = None
+    series: SonarrTVShow = None
+    episode: SonarrEpisode = None
+    timeleft: str = None
+    estimatedCompletionTime: str = None
 
 
 # Radarr
 class RadarrMovie(NamedTuple):
     added: str = None
-    addOptions: str = None
-    alternativeTitles: list = None
+    alternateTitles: list = None
     certification: str = None
     cleanTitle: str = None
-    downloaded: bool = None
+    collection: dict = None
+    digitalRelease: str = None
     folderName: str = None
     genres: list = None
     hasFile: bool = None
@@ -219,32 +320,58 @@ class RadarrMovie(NamedTuple):
     imdbId: str = None
     inCinemas: str = None
     isAvailable: bool = None
-    lastInfoSync: str = None
     minimumAvailability: str = None
     monitored: bool = None
     movieFile: dict = None
+    originalTitle: str = None
     overview: str = None
     path: str = None
-    pathState: str = None
     physicalRelease: str = None
-    physicalReleaseNote: str = None
-    profileId: int = None
     qualityProfileId: int = None
     ratings: dict = None
     runtime: int = None
-    secondaryYear: str = None
+    secondaryYear: int = None
     secondaryYearSourceId: int = None
-    sizeOnDisk: int = None
+    sizeOnDisk: float = None
     sortTitle: str = None
     status: str = None
     studio: str = None
     tags: list = None
-    title: str = None
     titleSlug: str = None
     tmdbId: int = None
     website: str = None
     year: int = None
     youTubeTrailerId: str = None
+    title: str = None
+    originalLanguage: str = None
+    addOptions: str = None
+    popularity: str = None
+
+
+# Radarr Queue
+class RadarrQueue(NamedTuple):
+    customFormats: list = None
+    downloadClient: str = None
+    downloadId: str = None
+    id: int = None
+    indexer: str = None
+    languages: list = None
+    movieId: int = None
+    protocol: str = None
+    quality: dict = None
+    size: float = None
+    sizeleft: float = None
+    status: str = None
+    statusMessages: list = None
+    title: str = None
+    trackedDownloadState: str = None
+    trackedDownloadStatus: str = None
+    timeleft: str = None
+    estimatedCompletionTime: str = None
+    errorMessage: str = None
+    outputPath: str = None
+    movie: RadarrMovie = None
 
 
 # Sickchill
@@ -364,6 +491,7 @@ class TautulliStream(NamedTuple):
     reference_id: int = None
     relay: int = None
     relayed: int = None
+    row_id: int = None
     section_id: str = None
     secure: str = None
     selected: int = None
@@ -402,6 +530,7 @@ class TautulliStream(NamedTuple):
     stream_video_codec: str = None
     stream_video_codec_level: str = None
     stream_video_decision: str = None
+    stream_video_dynamic_range: str = None
     stream_video_framerate: str = None
     stream_video_full_resolution: str = None
     stream_video_height: str = None
@@ -461,6 +590,7 @@ class TautulliStream(NamedTuple):
     video_codec: str = None
     video_codec_level: str = None
     video_decision: str = None
+    video_dynamic_range: str = None
     video_frame_rate: str = None
     video_framerate: str = None
     video_full_resolution: str = None
@@ -491,7 +621,9 @@ class LidarrQueue(NamedTuple):
     sizeleft: float = None
     status: str = None
     trackedDownloadStatus: str = None
+    trackedDownloadState: str = None
     statusMessages: list = None
+    errorMessage: str = None
     downloadId: str = None
     protocol: str = None
     downloadClient: str = None
@@ -499,6 +631,7 @@ class LidarrQueue(NamedTuple):
     outputPath: str = None
     downloadForced: bool = None
     id: int = None
+    estimatedCompletionTime: str = None
 
 
 class LidarrAlbum(NamedTuple):
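Why structures.py grows so many fields: these classes are instantiated as NamedTuple(**api_payload), and a NamedTuple raises TypeError for any key it does not declare, so every field the v3 APIs return has to be listed even if Varken never reads it. A tiny demonstration (illustrative class, not the real one):

```python
# NamedTuple construction is strict about keyword arguments.
from typing import NamedTuple

class RadarrMovieMini(NamedTuple):  # hypothetical subset of RadarrMovie
    title: str = None
    hasFile: bool = None

print(RadarrMovieMini(**{'title': 'Alien', 'hasFile': True}))  # works
try:
    RadarrMovieMini(**{'title': 'Alien', 'popularity': 12.3})  # field not declared above
except TypeError as e:
    print(e)  # ...unexpected keyword argument 'popularity'
```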
"type": "Session", "session_id": session.session_id, + "ip_address": session.ip_address, "friendly_name": session.friendly_name, "username": session.username, "title": session.full_title, @@ -327,6 +328,7 @@ def get_historical(self, days=30): "tags": { "type": "Session", "session_id": session.session_id, + "ip_address": session.ip_address, "friendly_name": session.friendly_name, "username": session.user, "title": session.full_title,