diff --git a/.github/workflows/pr_unittests.yml b/.github/workflows/pr_unittests.yml.off similarity index 97% rename from .github/workflows/pr_unittests.yml rename to .github/workflows/pr_unittests.yml.off index 5348cc5e0..f4601259f 100755 --- a/.github/workflows/pr_unittests.yml +++ b/.github/workflows/pr_unittests.yml.off @@ -1,10 +1,10 @@ -name: "Unittests Plugins" +name: "Unittests Plugins for PRs (disabled)" #on: [workflow_dispatch, push] on: workflow_dispatch: pull_request: branches: - - 'develop' + - '!develop' jobs: build: diff --git a/.github/workflows/unittests.yml b/.github/workflows/unittests.yml index 9f9ce13c0..2b2406663 100755 --- a/.github/workflows/unittests.yml +++ b/.github/workflows/unittests.yml @@ -6,7 +6,9 @@ on: branches: - '*' - '!release_doc' - + pull_request: + branches: + - 'develop' jobs: build: runs-on: ubuntu-20.04 #latest @@ -24,17 +26,37 @@ jobs: sudo apt-get install gcc --only-upgrade - name: Get branch name - run: | - echo "##[set-output name=branch;]$(echo ${GITHUB_REF#refs/heads/})" - echo ${GITHUB_REF#refs/heads/} + run: echo "branch=$(echo ${GITHUB_REF#refs/heads/})" >>$GITHUB_OUTPUT id: extract_branch - - name: Checkout core from ${{steps.extract_branch.outputs.branch}} branch + - name: Workflow Information + run: | + echo github.event_name '${{ github.event_name }}' + echo github.workflow '${{ github.workflow }}' + echo github.action_repository '${{ github.action_repository }}' + echo github.actor '${{ github.actor }}' + echo github.ref_name '${{ github.ref_name }}' + echo github.ref '${{ github.ref }}' + echo github.base_ref '${{ github.base_ref }}' + echo github.head_ref '${{ github.head_ref }}' + echo github.pull_request.base.ref '${{ github.pull_request.base.ref }}' + echo steps.extract_branch.outputs.branch '${{ steps.extract_branch.outputs.branch }}' + + - name: Checkout core from branch '${{ steps.extract_branch.outputs.branch }}' (for push) + if: github.event_name != 'pull_request' uses: actions/checkout@v3 with: 
repository: smarthomeNG/smarthome - ref: ${{steps.extract_branch.outputs.branch}} - - name: Checkout plugins from ${{steps.extract_branch.outputs.branch}} branch + ref: ${{ steps.extract_branch.outputs.branch }} + + - name: Checkout core from branch 'develop' (for pull request) + if: github.event_name == 'pull_request' + uses: actions/checkout@v3 + with: + repository: smarthomeNG/smarthome + ref: develop + + - name: Checkout plugins from branch '${{steps.extract_branch.outputs.branch}}' uses: actions/checkout@v3 with: repository: smarthomeNG/plugins diff --git a/__init__.py b/__init__.py index 81149b466..389159c96 100755 --- a/__init__.py +++ b/__init__.py @@ -1,5 +1,5 @@ def plugin_release(): - return '1.9.4' + return '1.9.5' def plugin_branch(): return 'master' diff --git a/alexarc4shng/README.md b/alexarc4shng/README.md index f162d96f2..a1e0eaedc 100755 --- a/alexarc4shng/README.md +++ b/alexarc4shng/README.md @@ -1,6 +1,6 @@ # AlexaRc4shNG -#### Version 1.0.2 +#### Version 1.0.3 The plugin gives the possibilty to control an Alexa-Echo-Device remote by smartHomeNG. So its possible to switch on an TuneIn-Radio Channel, send some messages via Text2Speech when an event happens on the knx-bus or on the Visu. On the Web-Interface you can define your own commandlets (functions). 
The follwing functions are available on the Web-Interface : @@ -38,7 +38,7 @@ Special thanks to Jonofe from the [Edomi-Forum](https://knx-user-forum.de/forum/ - Pause (pauses the actual media) - Text2Speech (sends a Text to the echo, echo will speak it) - StartTuneInStation (starts a TuneInRadiostation with the guideID you send) -- SSML (Speak to Text with[Speech Synthesis Markup Language](https://developer.amazon.com/docs/custom-skills/speech-synthesis-markup-language-ssml-reference.html)) +- SSML (Speak to Text with [Speech Synthesis Markup Language](https://developer.amazon.com/docs/custom-skills/speech-synthesis-markup-language-ssml-reference.html)) - VolumeAdj (adjusts the volume during playing some media not working from webinterface test functions) - VolumeSet (sets the volume to value from 0-100 percent) @@ -47,7 +47,7 @@ Special thanks to Jonofe from the [Edomi-Forum](https://knx-user-forum.de/forum/ ```yaml = Value to send as alpha = Value to send as numeric -#item.path/# = item-path of the value that should be inserted into text or ssml +"#item.path/#" = item-path of the value that should be inserted into text or ssml = SerialNo. of the device where the command should go to = device family = deviceType @@ -57,6 +57,14 @@ Special thanks to Jonofe from the [Edomi-Forum](https://knx-user-forum.de/forum/ ## ChangeLog +#### 2021.02.10 Version 1.0.3 + +- added MFA for Auto-Login +- added new Parameter (mfa_secret) in the etc/plugin.yaml +- added Step by Step Setup in Web-IF for MFA +- added public function to get the ToDo-List +- added public function to get the Shopping-List + #### 2020.03.20 Version 1.0.2 - changed public function "send_cmd_by_curl" to "send_cmd" @@ -160,7 +168,7 @@ Item2EnableAlexaRC->Item controlled by UZSU or something else which enables the alexa_credentials-> user:pwd (base64 encoded)
item_2_enable_alexa_rc -> Item to allow smarthomeNG to send Commands to Echo's
login_update_cycle->seconds to wait for automatic Login in to refresh the cookie - +mfa_secret-> The MFA-Secret you got from Amazon-Website (fill it out with the Web-Interface) ```yaml @@ -171,6 +179,7 @@ AlexaRc4shNG: item_2_enable_alexa_rc: Item_to_enable_Alexaremote alexa_credentials: : login_update_cycle: 432000 + mfa_secret: ``` @@ -203,12 +212,15 @@ alexa_cmd_01: True:EchoDotKueche:StartTuneInStation:s96141 Value = <20.0 - send command when value of the item becomes less then 20.0 EchodotKueche = Devicename where the Command should be send to Text2Speech = Name of the Commandlet -Value_to_Send = Die Temperatur in der Kueche ist niedriger als 20 Grad Die Temperatur ist jetzt #test.testzimmer.temperature.actual/# Grad #test.testzimmer.temperature.actual/# = item-path of the value that should be inserted +Value_to_Send = Die Temperatur in der Kueche ist niedriger als 20 Grad Die Temperatur ist jetzt #test.testzimmer.temperature.actual/# Grad ``` +```yaml +#test.testzimmer.temperature.actual/# = item-path of the value that should be inserted +``` example:
` -alexa_cmd_01: <20.0:EchoDotKueche:Text2Speech:Die Temperatur in der Kueche ist niedriger als 20 Grad Die Temperatur ist jetzt \#test.testzimmer.temperature.actual/\# Grad +alexa_cmd_01: <20.0:EchoDotKueche:Text2Speech:Die Temperatur in der Kueche ist niedriger als 20 Grad Die Temperatur ist jetzt #test.testzimmer.temperature.actual/# Grad ` You can find the paths of the items on the backend-WebInterface - section items. @@ -266,12 +278,13 @@ Example for settings in an item.conf file : alexa_cmd_01 = '"True:EchoDotKueche:StartTuneInStation:s96141" alexa_cmd_02 ="True:EchoDotKueche:Text2Speech:Hallo das Licht im Buero ist eingeschalten" alexa_cmd_03 = "False:EchoDotKueche:Text2Speech:Hallo das Licht im Buero ist aus" - alexa_cmd_04 = "False:EchoDotKueche:Pause: " + alexa_cmd_04 = "False:EchoDotKueche:Pause:" visu_acl = rw knx_dpt = 1 knx_listen = 1/1/105 knx_send = 1/1/105 - enforce_updates = truey_attr: setting + enforce_updates = true + ``` ### logic.yaml @@ -282,7 +295,7 @@ Right now no logics are implemented. But you can trigger the functions by your o The plugin provides the following publich functions. You can use it for example in logics. -### send_cmd(dvName, cmdName, mValue) +### send_cmd(dvName:str, cmdName:str, mValue:str) example how to use in logics: @@ -293,7 +306,7 @@ sh.AlexaRc4shNG.send_cmd('Kueche','Text2Speech','Der Sensor der Hebenlage signal ``` Sends a command to the device. "dvName" is the name of the device, "cmdName" is the name of the CommandLet, mValue is the value you would send. You can find all this informations on the Web-Interface. 
-You can also user the [placeholders](#placeholders) +You can also use the [placeholders](#placeholders) - the result will be the HTTP-Status of the request as string (str) @@ -305,11 +318,68 @@ This function returns the Device-Name of the last Echo Device which got a voice myLastDevice = sh.AlexaRc4shNG.get_last_alexa() ``` + +### get_list(type:str) + +This function returns the ToDo or the Shopping list - depending on "type" as dict
+ +valid types are : +```yaml + 'SHOPPING_LIST' + 'TO_DO' +``` + + +```yaml +sh.AlexaRc4shNG.get_list(type:str) +``` +## Example logic to fill Items with List-Infos + +
+
+from datetime import datetime
+# get the Todo-List
+myList=sh.AlexaRc4shNG.get_list('TO_DO')
+for entry in myList:
+  if entry['completed'] == True:
+    entry['icon'] = 'control_clear'
+  else:
+    entry['icon'] = 'control_home'
+  entry['date'] = datetime.fromtimestamp((entry['updatedDateTime']/1000)).strftime("%d.%m.%Y, %H:%M:%S")
+# Write list to Item - type should be list
+sh.Alexa_Lists.list.todo(myList)
+# get the shopping-List
+myList=sh.AlexaRc4shNG.get_list('SHOPPING_LIST')
+for entry in myList:
+  if entry['completed'] == True:
+    entry['icon'] = 'control_clear'
+  else:
+    entry['icon'] = 'jquery_shop'
+  entry['date'] = datetime.fromtimestamp((entry['updatedDateTime']/1000)).strftime("%d.%m.%Y, %H:%M:%S")
+# Write list to Item - type should be list
+sh.Alexa_Lists.list.shopping(myList)
+
+
+ +## Example to show lists in smartVisu with status.activelist +
+
+status.activelist('','Alexa_Lists.list.todo','value','date','value','info')
+
+status.activelist('','Alexa_Lists.list.shopping','value','date','value','info')
+
+
+ +### Ergebnis : +![PlaceHolder](./assets/Alexa_lists.jpg "jpg") + + + # Web-Interface
The Webinterface is reachable on you smarthomeNG server here :
-yourserver:8383/alexarc4shng/ +http://yourserver:8383/plugins/alexarc4shng/ ## Cookie-Handling diff --git a/alexarc4shng/__init__.py b/alexarc4shng/__init__.py index 57af04716..4d686018f 100755 --- a/alexarc4shng/__init__.py +++ b/alexarc4shng/__init__.py @@ -3,7 +3,7 @@ ######################################################################### # Copyright 2020 AndreK andre.kohler01@googlemail.com ######################################################################### -# This file is part of SmartHomeNG. +# This file is part of SmartHomeNG. # # Sample plugin for new plugins to run with SmartHomeNG version 1.5.2 and # upwards. @@ -41,12 +41,16 @@ import time import base64 import requests +from urllib.parse import urlencode - - +ImportPyOTPError = False +try: + import pyotp +except Exception as err: + ImportPyOTPError = True class shngObjects(object): def __init__(self): @@ -113,7 +117,7 @@ def __init__(self, id): ############################################################################## class AlexaRc4shNG(SmartPlugin): - PLUGIN_VERSION = '1.0.2' + PLUGIN_VERSION = '1.0.3' ALLOW_MULTIINSTANCE = False """ Main class of the Plugin. 
Does all plugin specific stuff and provides @@ -127,7 +131,7 @@ def __init__(self, sh, *args, **kwargs): self.items = Items.get_instance() self.shngObjects = shngObjects() self.shtime = Shtime.get_instance() - + # Init values self.header = '' self.cookie = {} @@ -136,19 +140,27 @@ def __init__(self, sh, *args, **kwargs): self.login_state = False self.last_update_time = '' self.next_update_time = '' + self.ImportPyOTPError = False # get parameters self.cookiefile = self.get_parameter_value('cookiefile') self.host = self.get_parameter_value('host') self.AlexaEnableItem = self.get_parameter_value('item_2_enable_alexa_rc') - self.credentials = self.get_parameter_value('alexa_credentials').encode('utf-8') - self.credentials = base64.decodebytes(self.credentials).decode('utf-8') + self.credentials = self.get_parameter_value('alexa_credentials') + if (self.credentials != 'None'): + self.credentials = self.get_parameter_value('alexa_credentials').encode('utf-8') + self.credentials = base64.decodebytes(self.credentials).decode('utf-8') self.LoginUpdateCycle = self.get_parameter_value('login_update_cycle') + self.mfa_Secret = self.get_parameter_value('mfa_secret') self.update_file=self.sh.get_basedir()+"/plugins/alexarc4shng/lastlogin.txt" self.rotating_log = [] + # Check if MFA is possible + if (ImportPyOTPError == True): + self.logger.warning("Plugin '{}': problem during import of pyotp, you will not be able to use MFA-Authentication".format(self.get_fullname())) + self.ImportPyOTPError = True if not self.init_webinterface(): self._init_complete = False - + return def run(self): @@ -158,12 +170,11 @@ def run(self): self.logger.info("Plugin '{}': start method called".format(self.get_fullname())) # get additional parameters from files self.csrf = self.parse_cookie_file(self.cookiefile) - + # Check login-state - if logged off and credentials are availabel login in if os.path.isfile(self.cookiefile): self.login_state=self.check_login_state() self.check_refresh_login() - if 
(self.login_state == False and self.credentials != ''): try: os.remove(self.update_file) @@ -171,19 +182,26 @@ def run(self): pass self.check_refresh_login() self.login_state=self.check_login_state() - - # Collect all devices + + # Collect all devices if (self.login_state): self.Echos = self.get_devices_by_request() else: self.Echos = None # enable scheduler if Login should be updated automatically - + if self.credentials != '': self.scheduler_add('check_login', self.check_refresh_login,cycle=300) #self.scheduler.add('plugins.alexarc4shng.check_login', self.check_refresh_login,cycle=300,from_smartplugin=True) + + if self.ImportPyOTPError: + logline = str(self.shtime.now())[0:19] + ' no pyOTP installed you can not use MFA' + else: + logline = str(self.shtime.now())[0:19] + ' pyOTP installed you can use MFA' + self._insert_protocoll_entry(logline) + self.alive = True - + # if you want to create child threads, do not make them daemon = True! # They will not shutdown properly. (It's a python bug) @@ -198,41 +216,41 @@ def stop(self): def parse_item(self, item): itemFound=False i=1 - + myValue = 'alexa_cmd_{}'.format( '%0.2d' %(i)) while myValue in item.conf: - + self.logger.debug("Plugin '{}': parse item: {} Command {}".format(self.get_fullname(), item,myValue)) - + CmdItem_ID = item._name try: myCommand = item.conf[myValue].split(":") - - + + if not self.shngObjects.exists(CmdItem_ID): self.shngObjects.put(CmdItem_ID) - + actDevice = self.shngObjects.get(CmdItem_ID) actDevice.Commands.append(Cmd(myValue)) - + actCommand = len(actDevice.Commands)-1 - + actDevice.Commands[actCommand].command = item.conf[myValue] myCommand = actDevice.Commands[actCommand].command.split(":") self.logger.info("Plugin '{}': parse item: {}".format(self.get_fullname(), item.conf[myValue])) - + actDevice.Commands[actCommand].ItemValue = myCommand[0] actDevice.Commands[actCommand].EndPoint = myCommand[1] actDevice.Commands[actCommand].Action = myCommand[2] actDevice.Commands[actCommand].Value = 
myCommand[3] itemFound=True - + except Exception as err: print("Error:" ,err) i += 1 myValue = 'alexa_cmd_{}'.format( '%0.2d' %(i)) - + # todo # if interesting item for sending values: # return update_item @@ -240,18 +258,18 @@ def parse_item(self, item): return self.update_item else: return None - + def parse_logic(self, logic): pass def update_item(self, item, caller=None, source=None, dest=None): - + # Item was not changed but double triggered the Upate_Item-Function if (self.AlexaEnableItem != ""): - AlexaEnabledItem = self.items.return_item(self.AlexaEnableItem) + AlexaEnabledItem = self.items.return_item(self.AlexaEnableItem) if AlexaEnabledItem() != True: return - + if item._type == "str": newValue=str(item()) oldValue=str(item.prev_value()) @@ -261,24 +279,24 @@ def update_item(self, item, caller=None, source=None, dest=None): else: newValue=str(item()) oldValue=str(item.prev_value()) - + # Nur bei Wertänderung, sonst nix wie raus hier if(oldValue == newValue): - return + return # End Test + - - + CmdItem_ID = item._name - - + + if self.shngObjects.exists(CmdItem_ID): self.logger.debug("Plugin '{}': update_item ws called with item '{}' from caller '{}', source '{}' and dest '{}'".format(self.get_fullname(), item, caller, source, dest)) - + actDevice = self.shngObjects.get(CmdItem_ID) - + for myCommand in actDevice.Commands: - + newValue2Set = myCommand.Value myItemBuffer = myCommand.ItemValue # Spezialfall auf bigger / smaller @@ -287,7 +305,7 @@ def update_item(self, item, caller=None, source=None, dest=None): myCompValue = myCommand.ItemValue.replace("<="," ") myCompValue = myCompValue.replace(".",",") myCompValue = float(myCompValue) - myCommand.ItemValue = actValue + myCommand.ItemValue = actValue if newValue > myCompValue: return elif myCommand.ItemValue.find(">=") >=0: @@ -295,7 +313,7 @@ def update_item(self, item, caller=None, source=None, dest=None): myCompValue = myCommand.ItemValue.replace(">="," ") myCompValue = myCompValue.replace(".",",") 
myCompValue = float(myCompValue) - myCommand.ItemValue = actValue + myCommand.ItemValue = actValue if newValue < myCompValue: return elif myCommand.ItemValue.find("=") >=0 : @@ -303,7 +321,7 @@ def update_item(self, item, caller=None, source=None, dest=None): myCompValue = myCommand.ItemValue.replace("="," ") myCompValue = myCompValue.replace(".",",") myCompValue = float(myCompValue) - myCommand.ItemValue = actValue + myCommand.ItemValue = actValue if newValue != myCompValue: return elif myCommand.ItemValue.find("<") >=0: @@ -324,7 +342,7 @@ def update_item(self, item, caller=None, source=None, dest=None): return else: actValue = str(item()) - + if ("volume" in myCommand.Action.lower()): httpStatus, myPlayerInfo = self.receive_info_by_request(myCommand.EndPoint,"LoadPlayerInfo","") # Store Player-Infos to Device @@ -341,7 +359,7 @@ def update_item(self, item, caller=None, source=None, dest=None): actVolume = int(item()) except: actVolume = 50 - + if ("volumeadj" in myCommand.Action.lower()): myDelta = int(myCommand.Value) if actVolume+myDelta < 0: @@ -350,38 +368,38 @@ def update_item(self, item, caller=None, source=None, dest=None): newValue2Set = 100 else: newValue2Set =actVolume+myDelta - + # neuen Wert speichern in item if ("volume" in myCommand.Action.lower()): item._value = newValue2Set - + if (actValue == str(myCommand.ItemValue) and myCommand): myCommand.ItemValue = myItemBuffer self.send_cmd(myCommand.EndPoint,myCommand.Action,newValue2Set) - - + + # find Value for Key in Json-structure - + def search(self,p, strsearch): - if type(p) is dict: + if type(p) is dict: if strsearch in p: tokenvalue = p[strsearch] if not tokenvalue is None: return tokenvalue else: for i in p: - tokenvalue = self.search(p[i], strsearch) + tokenvalue = self.search(p[i], strsearch) if not tokenvalue is None: return tokenvalue - + # handle Protocoll Entries def _insert_protocoll_entry(self, entry): if len(self.rotating_log) > 400: del self.rotating_log[400:] self.rotating_log.insert 
(0,entry) - + # Check if update of login is needed def check_refresh_login(self): my_file= self.update_file @@ -392,7 +410,7 @@ def check_refresh_login(self): fp.close() except: last_update_time = 0 - + mytime = time.time() if (last_update_time + self.LoginUpdateCycle < mytime): self.log_off() @@ -405,10 +423,10 @@ def check_refresh_login(self): else: self.last_update_time = datetime.fromtimestamp(last_update_time).strftime('%Y-%m-%d %H:%M:%S') self.next_update_time = datetime.fromtimestamp(last_update_time+self.LoginUpdateCycle).strftime('%Y-%m-%d %H:%M:%S') - - - - + + + + def replace_mutated_vowel(self,mValue): search = ["ä" , "ö" , "ü" , "ß" , "Ä" , "Ö", "Ü", "&" , "é", "á", "ó", "ß"] replace = ["ae", "oe", "ue", "ss", "Ae", "Oe","Ue", "und", "e", "a", "o", "ss"] @@ -421,16 +439,16 @@ def replace_mutated_vowel(self,mValue): counter +=1 except: pass - + return myNewValue - + ############################################## # Amazon API - Calls ############################################## - - + + def check_login_state(self): try: myHeader={ @@ -447,12 +465,12 @@ def check_login_state(self): myHeader = response.headers myDict=json.loads(myContent) mySession.close() - + self.logger.info('Status of check_login_state: %d' % response.status_code) - + logline = str(self.shtime.now())[0:19] +' Status of check_login_state: %d' % response.status_code self._insert_protocoll_entry(logline) - + myAuth =myDict['authentication']['authenticated'] if (myAuth == True): self.logger.info('Login-State checked - Result: Logged ON' ) @@ -461,17 +479,53 @@ def check_login_state(self): return True else: self.logger.info('Login-State checked - Result: Logged OFF' ) - logline = str(self.shtime.now())[0:19] +' Login-State checked - Result: Logged OFF' + logline = str(self.shtime.now())[0:19] +' Login-State checked - Result: Logged OFF' self._insert_protocoll_entry(logline) return False - - - + + + except Exception as err: self.logger.error('Login-State checked - Result: Logged OFF - try to 
login again') return False - - + + + def get_list(self, type=""): + if (self.login_state == False): + return [] + myReturnList = [] + myHeader = { "Host": "alexa.amazon.de", + "User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:65.0) Gecko/20100101 Firefox/65.0", + "Connection": "keep-alive", + "Content-Type": "application/json; charset=UTF-8", + "Accept-Language": "en-US,en;q=0.5", + "Referer": "https://alexa.amazon.de/spa/index.html", + "Origin":"https://alexa.amazon.de", + "DNT": "1" + } + mySession = requests.Session() + mySession.cookies.update(self.cookie) + response= mySession.get('https://'+self.host + '/api/namedLists?_=1',headers=myHeader,allow_redirects=True) + myContent= response.content.decode() + self.logger.warning('Lists loaded - content : {}'.format(myContent)) + self._insert_protocoll_entry('Lists loaded - content : {}'.format(myContent)) + + myLists = json.loads(myContent) + for mylistItem in myLists['lists']: + actList = mylistItem['itemId'] + encoded_args = urlencode({'listIds': actList}) + myListUrl = 'https://'+self.host + '/api/namedLists/{0}/items?startTime=&endTime=&completed=&{1}&_=2'.format(actList,encoded_args) + myListResponse = mySession.get(myListUrl,headers=myHeader,allow_redirects=True) + myListResponse= myListResponse.content.decode() + myListResponse = json.loads(myListResponse) + self.logger.warning('List-Entry loaded : {}'.format(myListResponse)) + self._insert_protocoll_entry('List-Entry loaded : {}'.format(myListResponse)) + for ListEntry in myListResponse['list']: + if mylistItem['type'] == type: + myReturnList.append({'value': ListEntry['value'].capitalize(), 'completed' : ListEntry['completed'], 'version': ListEntry['version'], 'createdDateTime': ListEntry['createdDateTime'], 'updatedDateTime': ListEntry['updatedDateTime']}) + + return myReturnList + def receive_info_by_request(self,dvName,cmdName,mValue): actEcho = self.Echos.get(dvName) myUrl='https://'+self.host @@ -484,9 +538,9 @@ def 
receive_info_by_request(self,dvName,cmdName,mValue): actEcho.family, actEcho.deviceType, actEcho.deviceOwnerCustomerId) - + myDescription,myUrl,myDict = self.load_command_let(cmdName,None) - + myHeader = { "Host": "alexa.amazon.de", "User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:65.0) Gecko/20100101 Firefox/65.0", "Connection": "keep-alive", @@ -495,7 +549,7 @@ def receive_info_by_request(self,dvName,cmdName,mValue): "Referer": "https://alexa.amazon.de/spa/index.html", "Origin":"https://alexa.amazon.de", "DNT": "1" - } + } mySession = requests.Session() mySession.cookies.update(self.cookie) response= mySession.get(myUrl,headers=myHeader,allow_redirects=True) @@ -504,13 +558,13 @@ def receive_info_by_request(self,dvName,cmdName,mValue): myContent= response.content.decode() myHeader = response.headers myDict=json.loads(myContent) - mySession.close() - + mySession.close() + return myResult,myDict + - - + def get_last_alexa(self): myHeader = { "Host": "alexa.amazon.de", "User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:65.0) Gecko/20100101 Firefox/65.0", @@ -520,7 +574,7 @@ def get_last_alexa(self): "Referer": "https://alexa.amazon.de/spa/index.html", "Origin":"https://alexa.amazon.de", "DNT": "1" - } + } mySession = requests.Session() mySession.cookies.update(self.cookie) response= mySession.get('https://'+self.host+'/api/activities?startTime=&size=10&offset=0', @@ -529,11 +583,11 @@ def get_last_alexa(self): myContent= response.content.decode() myHeader = response.headers myDict=json.loads(myContent) - mySession.close() + mySession.close() myDevice = myDict["activities"][0]["sourceDeviceIds"][0]["serialNumber"] myLastDevice = self.Echos.get_Device_by_Serial(myDevice) return myLastDevice - + def send_cmd(self,dvName, cmdName,mValue,path=None): # Parse the value field for dynamic content if (str(mValue).find("#") >= 0 and str(mValue).find("/#") >0): @@ -541,7 +595,7 @@ def send_cmd(self,dvName, cmdName,mValue,path=None): LastPos = 
str(mValue).find("/#",FirstPos) myItemName = str(mValue)[FirstPos+1:LastPos] myItem=self.items.return_item(myItemName) - + if myItem._type == "num": myValue = str(myItem()) myValue = myValue.replace(".", ",") @@ -552,8 +606,8 @@ def send_cmd(self,dvName, cmdName,mValue,path=None): mValue = mValue[0:FirstPos]+myValue+mValue[LastPos:LastPos-2]+mValue[LastPos+2:len(mValue)] mValue = self.replace_mutated_vowel(mValue) - - + + buffer = BytesIO() actEcho = None try: @@ -566,13 +620,13 @@ def send_cmd(self,dvName, cmdName,mValue,path=None): self.logger.warning('found no Echo with Name : {}'.format(dvName)) self._insert_protocoll_entry('found no Echo with Name : {}'.format(dvName)) return - + myUrl='https://'+self.host - + myDescriptions = '' myDict = {} - - + + myDescription,myUrl,myDict = self.load_command_let(cmdName,path) # complete the URL myUrl='https://'+self.host+myUrl @@ -584,31 +638,40 @@ def send_cmd(self,dvName, cmdName,mValue,path=None): actEcho.family, actEcho.deviceType, actEcho.deviceOwnerCustomerId) - + # replace the placeholders in Payload myHeaders=self.create_request_header() - + postfields = self.parse_json(myDict, mValue, actEcho.serialNumber, actEcho.family, actEcho.deviceType, actEcho.deviceOwnerCustomerId) - - + + try: + logline = str(self.shtime.now())[0:19] + ' sending command to "{}" payload {}'.format(dvName, json.dumps(postfields)) + self._insert_protocoll_entry(logline) + except Exception as err: + pass + myStatus,myRespHeader, myRespCookie, myContent = self.send_post_request(myUrl,myHeaders,self.cookie,postfields) - + myResult = myStatus + logline = str(self.shtime.now())[0:19] + ' Result of sending Command : {}'.format(myResult) + self._insert_protocoll_entry(logline) + if myResult == 200: self.logger.info('Status of send_cmd: %d' % myResult) + else: self.logger.warning("itemStatus of send_cmd: {}: {}".format(myResult, myContent)) - - return myResult - + + return myResult + def get_devices_by_request(self): try: myHeader={ @@ -626,15 
+689,15 @@ def get_devices_by_request(self): myDict=json.loads(myContent) mySession.close() myDevices = EchoDevices() - + self.logger.info('Status of get_devices_by_request: %d' % response.status_code) - - - + + + except Exception as err: self.logger.error('Error while getting Devices: %s' %err) return None - + for device in myDict['devices']: deviceFamily=device['deviceFamily'] #if deviceFamily == 'WHA' or deviceFamily == 'VOX' or deviceFamily == 'FIRE_TV' or deviceFamily == 'TABLET': @@ -642,7 +705,7 @@ def get_devices_by_request(self): try: actName = device['accountName'] myDevices.put(Echo(actName)) - + actDevice = myDevices.get(actName) actDevice.serialNumber=device['serialNumber'] actDevice.deviceType=device['deviceType'] @@ -652,12 +715,23 @@ def get_devices_by_request(self): except Exception as err: self.logger.debug('Error while getting Devices: %s' %err) myDevices = None - + return myDevices - - - - + + + + def read_cookie_file(self,cookiefile): + CookieFile = "" + try: + with open (cookiefile, 'r') as fp: + for line in fp: + CookieFile += line + fp.close() + except Exception as err: + self.logger.debug('Cookiefile could not be opened %s' % cookiefile) + + return CookieFile + def parse_cookie_file(self,cookiefile): self.cookie = {} csrf = 'N/A' @@ -666,25 +740,25 @@ def parse_cookie_file(self,cookiefile): for line in fp: if line.find('amazon.de')<0: continue - + lineFields = line.strip().split('\t') if len(lineFields) >= 7: # add Line to self.cookie if lineFields[2] == '/': self.cookie[lineFields[5]]=lineFields[6] - - + + if lineFields[5] == 'csrf': csrf = lineFields[6] fp.close() except Exception as err: self.logger.debug('Cookiefile could not be opened %s' % cookiefile) - + return csrf - - + + def parse_url(self,myDummy,mValue,serialNumber,familiy,deviceType,deviceOwnerCustomerId): - + myDummy = myDummy.strip() myDummy=myDummy.replace(' ','') # for String @@ -693,34 +767,34 @@ def 
parse_url(self,myDummy,mValue,serialNumber,familiy,deviceType,deviceOwnerCus except Exception as err: print("no String") # for Numbers - try: + try: myDummy=myDummy.replace('""',mValue) except Exception as err: print("no Integer") - + # Inject the Device informations myDummy=myDummy.replace('',serialNumber) myDummy=myDummy.replace('',familiy) myDummy=myDummy.replace('',deviceType) myDummy=myDummy.replace('',deviceOwnerCustomerId) - + return myDummy - + def parse_json(self,myDict,mValue,serialNumber,familiy,deviceType,deviceOwnerCustomerId): myDummy = json.dumps(myDict, sort_keys=True) - + count = 0 for char in myDummy: if char == '{': count = count + 1 - - + + if count > 1: # Find First Pos for inner Object FirstPos = myDummy.find("{",1) - + # Find last Pos for inner Object LastPos = 0 pos1 = 1 @@ -730,14 +804,14 @@ def parse_json(self,myDict,mValue,serialNumber,familiy,deviceType,deviceOwnerCus correctPos = LastPos LastPos = pos1 LastPos = correctPos - - + + innerJson = myDummy[FirstPos+1:LastPos] innerJson = innerJson.replace('"','\\"') - + myDummy = myDummy[0:FirstPos]+'"{'+innerJson+'}"'+myDummy[LastPos+1:myDummy.__len__()] - - + + myDummy = myDummy.strip() myDummy=myDummy.replace(' ','') # for String @@ -746,20 +820,20 @@ def parse_json(self,myDict,mValue,serialNumber,familiy,deviceType,deviceOwnerCus except Exception as err: print("no String") # for Numbers - try: + try: myDummy=myDummy.replace('""',str(mValue)) except Exception as err: print("no Integer") - + # Inject the Device informations myDummy=myDummy.replace('',serialNumber) myDummy=myDummy.replace('',familiy) myDummy=myDummy.replace('',deviceType) myDummy=myDummy.replace('',deviceOwnerCustomerId) - + return myDummy - - + + def create_request_header(self): myheaders= {"Host": "alexa.amazon.de", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.13; rv:60.0) Gecko/20100101 Firefox/60.0", @@ -774,16 +848,16 @@ def create_request_header(self): "Cache-Control": "no-cache" } return myheaders - + def 
load_command_let(self,cmdName,path=None): myDescription = '' myUrl = '' myJson = '' retJson = {} - + if path==None: path=self.sh.get_basedir()+"/plugins/alexarc4shng/cmd/" - + try: file=open(path+cmdName+'.cmd','r') for line in file: @@ -804,12 +878,12 @@ def load_command_let(self,cmdName,path=None): except: self.logger.error("Error while loading Commandlet : {}".format(cmdName)) return myDescription,myUrl,retJson + - - + def load_cmd_list(self): retValue=[] - + files = os.listdir(self.sh.get_basedir()+'/plugins/alexarc4shng/cmd/') for line in files: try: @@ -819,7 +893,7 @@ def load_cmd_list(self): retValue.append(newCmd) except: pass - + return json.dumps(retValue) def check_json(self,payload): @@ -828,7 +902,7 @@ def check_json(self,payload): return 'Json OK' except Exception as err: return 'Json - Not OK - '+ err.args[0] - + def delete_cmd_let(self,name): result = "" try: @@ -838,7 +912,7 @@ def delete_cmd_let(self,name): except Exception as err: result = "Status:failure\n" result += "value1:Error - "+err.args[1]+"\n" - + ################## # prepare Response ################## @@ -849,15 +923,15 @@ def delete_cmd_let(self,name): myFields=line.split(":") newEntry[myFields[0]] = myFields[1] - myResponse.append(newEntry) + myResponse.append(newEntry) ################## return json.dumps(myResponse,sort_keys=True) - + def test_cmd_let(self,selectedDevice,txtValue,txtDescription,txt_payload,txtApiUrl): result = "" if (txtApiUrl[0:1] != "/"): txtApiUrl = "/"+txtApiUrl - + JsonResult = self.check_json(txt_payload) if (JsonResult != 'Json OK'): result = "Status:failure\n" @@ -871,7 +945,7 @@ def test_cmd_let(self,selectedDevice,txtValue,txtDescription,txt_payload,txtApiU except Exception as err: result = "Status:failure\n" result += "value1:"+err.args[0]+"\n" - + ################## # prepare Response ################## @@ -882,7 +956,7 @@ def test_cmd_let(self,selectedDevice,txtValue,txtDescription,txt_payload,txtApiU myFields=line.split(":") newEntry[myFields[0]] = 
myFields[1] - myResponse.append(newEntry) + myResponse.append(newEntry) ################## return json.dumps(myResponse,sort_keys=True) @@ -892,7 +966,7 @@ def load_cmd_2_webIf(self,txtCmdName): result = "Status|OK\n" result += "Description|"+myDescription+"\n" result += "myUrl|"+myUrl+"\n" - result += "payload|"+str(myDict)+"\n" + result += "payload|"+json.dumps(myDict)+"\n" except Exception as err: result = "Status|failure\n" result += "value1|"+err.args[0]+"\n" @@ -906,25 +980,25 @@ def load_cmd_2_webIf(self,txtCmdName): myFields=line.split("|") newEntry[myFields[0]] = myFields[1] - myResponse.append(newEntry) + myResponse.append(newEntry) ################## return json.dumps(myResponse,sort_keys=True) - - + + def save_cmd_let(self,name,description,payload,ApiURL,path=None): if path==None: path=self.sh.get_basedir()+"/plugins/alexarc4shng/cmd/" - + result = "" mydummy = ApiURL[0:1] if (ApiURL[0:1] != "/"): ApiURL = "/"+ApiURL - + JsonResult = self.check_json(payload) if (JsonResult != 'Json OK'): result = "Status:failure\n" result += "value1:"+JsonResult+"\n" - + else: try: myDict = json.loads(payload) @@ -936,13 +1010,13 @@ def save_cmd_let(self,name,description,payload,ApiURL,path=None): file.write("description|"+description+"\r\n") file.write("json|"+myDump+"\r\n") file.close - + result = "Status:OK\n" result += "value1:"+JsonResult + "\n" result += "value2:Saved Commandlet\n" except Exception as err: print (err) - + ################## # prepare Response ################## @@ -953,10 +1027,10 @@ def save_cmd_let(self,name,description,payload,ApiURL,path=None): myFields=line.split(":") newEntry[myFields[0]] = myFields[1] - myResponse.append(newEntry) + myResponse.append(newEntry) ################## return json.dumps(myResponse,sort_keys=True) - + def send_get_request(self,url="", myHeader="",Cookie=""): mySession = requests.Session() mySession.cookies.update(Cookie) @@ -964,7 +1038,7 @@ def send_get_request(self,url="", myHeader="",Cookie=""): 
headers=myHeader, allow_redirects=True) return response.status_code, response.headers, response.cookies, response.content.decode(),response.url - + def send_post_request(self,url="", myHeader="",Cookie="",postdata=""): mySession = requests.Session() mySession.cookies.update(Cookie) @@ -974,18 +1048,18 @@ def send_post_request(self,url="", myHeader="",Cookie="",postdata=""): allow_redirects=True) mySession.close() return response.status_code, response.headers, mySession.cookies, response.content.decode() - + def parse_response_cookie_2_txt(self, cookie, CollectingTxtCookie): for c in cookie: if c.domain != '': CollectingTxtCookie += c.domain+"\t"+str(c.domain_specified)+"\t"+ c.path+"\t"+ str(c.secure)+"\t"+ str(c.expires)+"\t"+ c.name+"\t"+ c.value+"\r\n" return CollectingTxtCookie - + def parse_response_cookie(self, cookie, CollectingCookie): for c in cookie: - CollectingCookie[c.name] = c.value + CollectingCookie[c.name] = c.value return CollectingCookie - + def collect_postdata(self,content): content = str(content.replace('hidden', '\r\nhidden')) postdata = {} @@ -995,12 +1069,12 @@ def collect_postdata(self,content): data = re.findall(r'hidden.*name="([^"]+).*value="([^"]+).*/',myLine) if len(data) >0: postdata[data[0][0]]= data[0][1] - - + + postdata['showPasswordChecked'] = 'false' return postdata - - + + def auto_login_by_request(self): if self.credentials == '': return False @@ -1024,13 +1098,13 @@ def auto_login_by_request(self): "Connection" : "keep-alive", "Accept-Encoding" : "gzip, deflate, br" } - myStatus,myRespHeader, myRespCookie, myContent,myLocation = self.send_get_request('https://'+self.host,myHeaders) + myStatus,myRespHeader, myRespCookie, myContent,myLocation = self.send_get_request('https://'+self.host+'/spa/index.html',myHeaders) myCollectionTxtCookie = self.parse_response_cookie_2_txt(myRespCookie,myCollectionTxtCookie) myCollectionCookie = self.parse_response_cookie(myRespCookie,myCollectionCookie) PostData = 
self.collect_postdata(myContent) - + actSessionID = myRespCookie['session-id'] - + self.logger.info('Status of Auto-Login First Step: %d' % myStatus) myResults.append('HTTP : ' + str(myStatus)+'- Step 1 - get Session-ID') #################################################### @@ -1049,17 +1123,17 @@ def auto_login_by_request(self): } newUrl = "https://www.amazon.de"+"/ap/signin/"+actSessionID postfields = urllib3.request.urlencode(PostData) - + myStatus,myRespHeader, myRespCookie, myContent = self.send_post_request(newUrl,myHeaders,myCollectionCookie,PostData) myCollectionTxtCookie = self.parse_response_cookie_2_txt(myRespCookie,myCollectionTxtCookie) myCollectionCookie = self.parse_response_cookie(myRespCookie,myCollectionCookie) PostData = self.collect_postdata(myContent) - + #actSessionID = myRespCookie['session-id'] - + self.logger.info('Status of Auto-Login Second Step: %d' % myStatus) myResults.append('HTTP : ' + str(myStatus)+'- Step 2 - login blank to get referer') - + #################################################### # Start Step 3 - login with form #################################################### @@ -1080,23 +1154,34 @@ def auto_login_by_request(self): PostData['email'] =user PostData['password'] = pwd - + + # If MFA Secret is Set - try with MFA + if (self.mfa_Secret and self.ImportPyOTPError == False): + self.logger.info("Plugin '{}': Try to login via MFA".format(self.get_fullname())) + self.mfa_Secret = self.mfa_Secret.replace(" ","") + totp = pyotp.TOTP(self.mfa_Secret) + mfaCode = totp.now() + PostData['password'] += mfaCode + myResults.append('MFA : ' + 'use MFA/OTP - Login OTP : {}'.format(mfaCode)) + + postfields = urllib3.request.urlencode(PostData) myStatus,myRespHeader, myRespCookie, myContent = self.send_post_request(newUrl,myHeaders,myCollectionCookie,PostData) myCollectionTxtCookie = self.parse_response_cookie_2_txt(myRespCookie,myCollectionTxtCookie) myCollectionCookie = self.parse_response_cookie(myRespCookie,myCollectionCookie) 
PostData = self.collect_postdata(myContent) - + self.logger.info('Status of Auto-Login third Step: %d' % myStatus) + myResults.append('HTTP : ' + str(myStatus)+'- Step 3 - login with credentials') file=open("/tmp/alexa_step2.html","w") file.write(myContent) file.close - + ################################################################# ## done - third Step - logged in now go an get the goal (csrf) ################################################################# - + myHeaders ={ "User-Agent" : "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:65.0) Gecko/20100101 Firefox/65.0", "Accept-Language" : "de,en-US;q=0.7,en;q=0.3", @@ -1108,14 +1193,14 @@ def auto_login_by_request(self): } Url = 'https://'+self.host+'/templates/oobe/d-device-pick.handlebars' #Url = 'https://'+self.host+'/api/language' - + myStatus,myRespHeader, myRespCookie, myContent,myLocation = self.send_get_request(Url,myHeaders,myCollectionCookie) myCollectionTxtCookie = self.parse_response_cookie_2_txt(myRespCookie,myCollectionTxtCookie) myCollectionCookie = self.parse_response_cookie(myRespCookie,myCollectionCookie) myResults.append('HTTP : ' + str(myStatus)+'- Step 4 - get csrf') self.logger.info('Status of Auto-Login fourth Step: %d' % myStatus) - + #################################################### # check the csrf #################################################### @@ -1133,16 +1218,16 @@ def auto_login_by_request(self): #################################################### try: with open (self.cookiefile, 'w') as myFile: - - + + myFile.write("# AlexaRc4shNG HTTP Cookie File"+"\r\n") myFile.write("# https://www.smarthomeng.de/user/"+"\r\n") myFile.write("# This file was generated by alexarc4shng@smarthomeNG! 
Edit at your own risk."+"\r\n") - myFile.write("\r\n") + myFile.write("# ---------------------------------------------------------------------------\r\n") for line in myCollectionTxtCookie.splitlines(): myFile.write(line+"\r\n") - myFile.close() - + myFile.close() + myResults.append('cookieFile- Step 6 - creation done') self.cookie = myCollectionCookie self.login_state= self.check_login_state() @@ -1150,31 +1235,34 @@ def auto_login_by_request(self): file=open(self.update_file,"w") file.write(str(mytime)+"\r\n") file.close() - + myResults.append('login state : %s' % self.login_state) except: myResults.append('cookieFile- Step 6 - error while writing new cookie-File') - + for entry in myResults: - logline = str(self.shtime.now())[0:19] + ' ' + entry + logline = str(self.shtime.now())[0:19] + ' ' + entry self._insert_protocoll_entry(logline) + if (self.mfa_Secret != "" and self.ImportPyOTPError == False): + myResults.append('use OTP-Code : '+mfaCode) + return myResults - - - - - + + + + + def log_off(self): myUrl='https://'+self.host+"/logout" myHeaders={"DNT" :"1", "Connection":"keep-alive", "User-Agent":"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:65.0) Gecko/20100101 Firefox/65.0" } - + myStatus,myRespHeader, myRespCookie, myContent,myLocation = self.send_get_request(myUrl,myHeaders, self.cookie) - + self.logger.info('Status of log_off: {}'.format(myStatus)) - + if myStatus == 200: logline = str(self.shtime.now())[0:19] +' successfully logged off' self._insert_protocoll_entry(logline) @@ -1182,13 +1270,13 @@ def log_off(self): else: logline = str(self.shtime.now())[0:19] +' Error while logging off' return "HTTP - " + str(myStatus)+" Error while logging off" + - - + ############################################## # Web-Interface ############################################## - + def init_webinterface(self): """" Initialize the web interface for this plugin @@ -1226,8 +1314,8 @@ def init_webinterface(self): return True - - + + # 
------------------------------------------ # Webinterface of the plugin @@ -1258,11 +1346,11 @@ def render_template(self, tmpl_name, **kwargs): """ Render a template and add vars needed gobally (for navigation, etc.) - + :param tmpl_name: Name of the template file to be rendered :param **kwargs: keyworded arguments to use while rendering - - :return: contents of the template after beeing rendered + + :return: contents of the template after beeing rendered """ tmpl = self.tplenv.get_template(tmpl_name) @@ -1271,7 +1359,7 @@ def render_template(self, tmpl_name, **kwargs): **kwargs) def set_cookie_pic(self,CookieOK=False): - dstFile = self.plugin.sh.get_basedir()+'/plugins/alexarc4shng/webif/static/img/plugin_logo_old.png' + dstFile = self.plugin.sh.get_basedir()+'/plugins/alexarc4shng/webif/static/img/plugin_logo.png' srcGood = self.plugin.sh.get_basedir()+'/plugins/alexarc4shng/webif/static/img/alexa_cookie_good.png' srcBad = self.plugin.sh.get_basedir()+'/plugins/alexarc4shng/webif/static/img/alexa_cookie_bad.png' if os.path.isfile(dstFile): @@ -1281,7 +1369,7 @@ def set_cookie_pic(self,CookieOK=False): os.popen('cp '+srcGood + ' ' + dstFile) else: if os.path.isfile(srcBad): - os.popen('cp '+srcBad + ' ' + dstFile) + os.popen('cp '+srcBad + ' ' + dstFile) @cherrypy.expose def index(self, reload=None): @@ -1292,34 +1380,104 @@ def index(self, reload=None): :return: contents of the template after beeing rendered """ - - if (self.plugin.login_state != 'N/A'): + + if (self.plugin.login_state == True): self.set_cookie_pic(True) else: self.set_cookie_pic(False) - + log_file = '' for line in self.plugin.rotating_log: log_file += str(line)+'\n' - + myDevices = self.get_device_list() alexa_device_count = len(myDevices) - - login_info = self.plugin.last_update_time + '('+ self.plugin.next_update_time + ')' - return self.render_template('index.html',device_list=myDevices,csrf_cookie=self.plugin.csrf,alexa_device_count=alexa_device_count,time_auto_login=login_info, 
log_file=log_file) - - + + login_info = self.plugin.last_update_time + ' ('+ self.plugin.next_update_time + ')' + + if (self.plugin.cookiefile != ""): + cookie_txt = self.plugin.read_cookie_file(self.plugin.cookiefile) + else: + cookie_txt = "" + return self.render_template('index.html', + device_list=myDevices, + csrf_cookie=self.plugin.csrf, + alexa_device_count=alexa_device_count, + time_auto_login=login_info, + log_file=log_file, + cookie_txt=cookie_txt, + pyOTP = self.plugin.ImportPyOTPError) + + @cherrypy.expose + def handle_mfa_html(self, data = None ): + txt_Result = {} + myCommand = json.loads(data) + myOrder = myCommand["Key"] + if myOrder == "Step1": + myUser = myCommand["data"]["User"] + myPwd = myCommand["data"]["Pwd"] + myResult = self.store_credentials_html('', myPwd, myUser, False, '', False) + + txt_Result["Status"] = "OK" + txt_Result["Step"] = myOrder + txt_Result["data"] = { "Result" : myResult } + + + elif myOrder =="Step3": + myMFA = myCommand["data"]["MFA"] + myMFA = myMFA.replace(" ","") + if (len(myMFA) != 52): + txt_Result["Status"] = "ERROR" + txt_Result["Step"] = myOrder + txt_Result["data"] = { "Message" : "MFA - code has not correct length (should be 52)
Try again" } + else: + try: + totp = pyotp.TOTP(myMFA) + mfaCode = totp.now() + txt_Result["Status"] = "OK" + txt_Result["Step"] = myOrder + txt_Result["data"] = { "OTPCode" : mfaCode } + except err as Exception: + txt_Result["Status"] = "ERROR" + txt_Result["Step"] = myOrder + txt_Result["data"] = { "Message" : "OTP could not calculated something seems to be wrong with the MFA
Try again" } + + elif myOrder =="Step5": + myMFA = myCommand["data"]["MFA"] + myMFA = myMFA.replace(" ","") + myUser = myCommand["data"]["User"] + myPwd = myCommand["data"]["Pwd"] + myResult = self.store_credentials_html('', myPwd, myUser, True, myMFA, False) + if ('stored new config to filesystem' in myResult): + txt_Result["Status"] = "OK" + txt_Result["Step"] = myOrder + txt_Result["data"] = { "Result" : myResult } + else: + txt_Result["Status"] = "ERROR" + txt_Result["Step"] = myOrder + txt_Result["data"] = { "Message" : 'could not store Credentials + MFA to /etc/plugin.yaml' } + + elif myOrder =="Step6": + if (myCommand["data"]["command"] == 'login'): + myResult=self.plugin.auto_login_by_request() + txt_Result["Status"] = "OK" + txt_Result["Step"] = myOrder + txt_Result["data"] = { "Result" :{ "LoginState" : self.plugin.login_state} } + + + return json.dumps(txt_Result) + @cherrypy.expose def log_off_html(self,txt_Result=None): txt_Result=self.plugin.log_off() return json.dumps(txt_Result) - + @cherrypy.expose def log_in_html(self,txt_Result=None): txt_Result=self.plugin.auto_login_by_request() return json.dumps(txt_Result) - - + + @cherrypy.expose def handle_buttons_html(self,txtValue=None, selectedDevice=None,txtButton=None,txt_payload=None,txtCmdName=None,txtApiUrl=None,txtDescription=None): if txtButton=="BtnSave": @@ -1335,21 +1493,21 @@ def handle_buttons_html(self,txtValue=None, selectedDevice=None,txtButton=None,t result = self.plugin.delete_cmd_let(txtCmdName) else: pass - + #return self.render_template("index.html",txtresult=result) return result - - + + @cherrypy.expose def build_cmd_list_html(self,reload=None): myCommands = self.plugin.load_cmd_list() return myCommands - - + + def get_device_list(self): if (self.plugin.login_state == True): self.plugin.Echos = self.plugin.get_devices_by_request() - + Device_items = [] try: myDevices = self.plugin.Echos.devices @@ -1362,78 +1520,99 @@ def get_device_list(self): newEntry['deviceType'] = 
Echo2Add.deviceType newEntry['deviceOwnerCustomerId'] = Echo2Add.deviceOwnerCustomerId Device_items.append(newEntry) - + except Exception as err: self.logger.debug("No devices found: {}".format(err)) - + return Device_items @cherrypy.expose - def store_credentials_html(self, encoded='', pwd = '', user= '', store_2_config=None): + def store_credentials_html(self, encoded='', pwd = '', user= '', store_2_config=None, mfa='',login=False): txt_Result = [] myCredentials = user+':'+pwd byte_credentials = base64.b64encode(myCredentials.encode('utf-8')) encoded = byte_credentials.decode("utf-8") - txt_Result.append("encoded:"+encoded) + txt_Result.append("encoded:"+encoded) txt_Result.append("Encoding done") conf_file=self.plugin.sh.get_basedir()+'/etc/plugin.yaml' - if (store_2_config == 'true'): + if (store_2_config == True): new_conf = "" with open (conf_file, 'r') as myFile: for line in myFile: if line.find('alexa_credentials') > 0: line = ' alexa_credentials: '+encoded+ "\r\n" - new_conf += line - myFile.close() + if line.find('mfa_secret') > 0 : + line = ' mfa_secret: '+mfa+ "\r\n" + new_conf += line + myFile.close() txt_Result.append("replaced credentials in temporary file") with open (conf_file, 'w') as myFile: for line in new_conf.splitlines(): myFile.write(line+'\r\n') myFile.close() txt_Result.append("stored new config to filesystem") + self.plugin.credentials = myCredentials + if login == True: + if (mfa != '' and self.plugin.ImportPyOTPError == False): + # Try to login asap with MFA + self.plugin.mfa_Secret = mfa + else: + self.plugin.mfa_Secret = "" + + txt_Result_Login=self.plugin.auto_login_by_request() + for entry in txt_Result_Login: + txt_Result.append(entry) + return json.dumps(txt_Result) - + @cherrypy.expose - def storecookie_html(self, save=None, cookie_txt=None, txt_Result=None, txtUser=None, txtPwd=None, txtEncoded=None, store_2_config=None): - myLines = cookie_txt.splitlines() + def storecookie_html(self, cookie_txt=None,): + txt_Result={} + 
myLines = bytes(cookie_txt, "utf-8").decode("unicode_escape").replace('"','').splitlines() # # Problem - different Handling of Cookies by Browser file=open("/tmp/cookie.txt","w") for line in myLines: - file.write(line+"\r\n") + if (line != ""): + file.write(line+"\r\n") file.close() value1 = self.plugin.parse_cookie_file("/tmp/cookie.txt") self.plugin.login_state = self.plugin.check_login_state() - + if (self.plugin.login_state == True): self.set_cookie_pic(True) else: self.set_cookie_pic(False) - - + + if (self.plugin.login_state == False) : # Cookies not found give back an error + + ''' tmpl = self.tplenv.get_template('index.html') return tmpl.render(plugin_shortname=self.plugin.get_shortname(), plugin_version=self.plugin.get_version(), plugin_info=self.plugin.get_info(), p=self.plugin, txt_Result=' Cookies are not saved missing csrf', cookie_txt=cookie_txt, csrf_cookie=value1) - + ''' + txt_Result["data"] = { "Result" : False } + return json.dumps(txt_Result) + # Store the Cookie-file for permanent use file=open(self.plugin.cookiefile,"w") for line in myLines: file.write(line+"\r\n") file.close() - + self.plugin.csrf = value1 - - + + myDevices = self.get_device_list() alexa_device_count = len(myDevices) - - + + ''' tmpl = self.tplenv.get_template('index.html') return tmpl.render(plugin_shortname=self.plugin.get_shortname(), plugin_version=self.plugin.get_version(), plugin_info=self.plugin.get_info(), p=self.plugin, @@ -1442,7 +1621,11 @@ def storecookie_html(self, save=None, cookie_txt=None, txt_Result=None, txtUser= csrf_cookie=value1, device_list=myDevices, alexa_device_count=alexa_device_count) + ''' + txt_Result["data"] = { "Result" : True } + return json.dumps(txt_Result) + + - - + diff --git a/alexarc4shng/assets/Alexa_lists.jpg b/alexarc4shng/assets/Alexa_lists.jpg new file mode 100755 index 000000000..968d248ad Binary files /dev/null and b/alexarc4shng/assets/Alexa_lists.jpg differ diff --git a/alexarc4shng/cmd/Announce_Trumpet.cmd 
b/alexarc4shng/cmd/Announce_Trumpet.cmd new file mode 100755 index 000000000..6b62d5b53 --- /dev/null +++ b/alexarc4shng/cmd/Announce_Trumpet.cmd @@ -0,0 +1,3 @@ +apiurl|/api/behaviors/preview +description|Test für Announce mit Doorbell +json|{"behaviorId": "PREVIEW", "sequenceJson": {"@type": "com.amazon.alexa.behaviors.model.Sequence", "startNode": {"type": "AlexaAnnouncement", "@type": "com.amazon.alexa.behaviors.model.OpaquePayloadOperationNode", "operationPayload": {"customerId": "", "content": [{"display": {"title": "smartHomeNG", "body": "Hallo"}, "speak": {"type": "text", "value": ""}, "locale": "de-DE"}], "expireAfter": "PT10S", "target": {"customerId": "", "devices": [{"deviceSerialNumber": "", "deviceTypeId": ""}]}}}}, "status": "ENABLED"} diff --git a/alexarc4shng/cmd/Announcement.cmd b/alexarc4shng/cmd/Announcement.cmd new file mode 100755 index 000000000..04647a686 --- /dev/null +++ b/alexarc4shng/cmd/Announcement.cmd @@ -0,0 +1,3 @@ +apiurl|/api/behaviors/preview +description|Use SSML to speak-Example: +json|{"behaviorId": "PREVIEW", "sequenceJson": {"@type": "com.amazon.alexa.behaviors.model.Sequence", "startNode": {"operationPayload": {"customerId": "", "content": [{"display": {"title": "smartHomeNG", "body": ""}, "speak": {"type": "text", "value": ""}, "locale": "de-DE"}], "expireAfter": "PT5S", "target": {"customerId": ""}}, "type": "AlexaAnnouncement", "@type": "com.amazon.alexa.behaviors.model.OpaquePayloadOperationNode"}}, "status": "ENABLED"} diff --git a/alexarc4shng/cmd/Klingel.cmd b/alexarc4shng/cmd/Klingel.cmd new file mode 100755 index 000000000..26f360a4a --- /dev/null +++ b/alexarc4shng/cmd/Klingel.cmd @@ -0,0 +1,3 @@ +apiurl|/api/behaviors/preview +description|play doorbell routine +json|{"behaviorId": "PREVIEW", "status": "ENABLED", "sequenceJson": {"@type": "com.amazon.alexa.behaviors.model.Sequence", "sequenceId": "amzn1.alexa.sequence.8d9b40ab-91a7-46c1-8d42-1cd53408874f", "startNode": {"@type": 
"com.amazon.alexa.behaviors.model.SerialNode", "name": null, "nodesToExecute": [{"@type": "com.amazon.alexa.behaviors.model.OpaquePayloadOperationNode", "nodeState": null, "name": null, "type": "Alexa.DeviceControls.Volume", "skillId": "amzn1.ask.1p.alexadevicecontrols", "operationPayload": {"customerId": "", "deviceType": "", "deviceSerialNumber": "", "value": 80, "locale": "de-DE"}, "presentationDataList": null, "clientData": null, "context": null, "tag": null}, {"@type": "com.amazon.alexa.behaviors.model.OpaquePayloadOperationNode", "nodeState": null, "name": null, "type": "Alexa.Sound", "skillId": "amzn1.ask.1p.sound", "operationPayload": {"customerId": "", "deviceType": "", "deviceSerialNumber": "", "soundStringId": "amzn_sfx_doorbell_chime_02", "locale": "de-DE"}, "presentationDataList": null, "clientData": null, "context": null, "tag": null}, {"@type": "com.amazon.alexa.behaviors.model.OpaquePayloadOperationNode", "nodeState": null, "name": null, "type": "Alexa.DeviceControls.Volume", "skillId": "amzn1.ask.1p.alexadevicecontrols", "operationPayload": {"customerId": "", "deviceType": "", "deviceSerialNumber": "", "value": 20, "locale": "de-DE"}, "presentationDataList": null, "clientData": null, "context": null, "tag": null}]}}} diff --git a/alexarc4shng/cmd/MultiText2Speech.cmd b/alexarc4shng/cmd/MultiText2Speech.cmd new file mode 100755 index 000000000..bd0661f3f --- /dev/null +++ b/alexarc4shng/cmd/MultiText2Speech.cmd @@ -0,0 +1,3 @@ +apiurl|/api/behaviors/preview +description|Text to speach +json|{"behaviorId": "PREVIEW", "status": "ENABLED", "sequenceJson": {"@type": "com.amazon.alexa.behaviors.model.Sequence", "sequenceId": "amzn1.alexa.sequence.8d9b40ab-91a7-46c1-8d42-1cd53408874f", "startNode": {"@type": "com.amazon.alexa.behaviors.model.SerialNode", "name": null, "nodesToExecute": [{"@type": "com.amazon.alexa.behaviors.model.OpaquePayloadOperationNode", "type": "Alexa.Speak", "operationPayload": {"textToSpeak": "", "locale": "de-DE", "customerId": 
"", "deviceSerialNumber": "", "deviceType": ""}}, {"@type": "com.amazon.alexa.behaviors.model.OpaquePayloadOperationNode", "type": "Alexa.Speak", "operationPayload": {"textToSpeak": "", "locale": "de-DE", "customerId": "A145HAI7HRJWX3", "deviceSerialNumber": "G091830895260311", "deviceType": "A1Z88NGR2BK6A2"}}]}}} diff --git a/alexarc4shng/cmd/SSML.cmd b/alexarc4shng/cmd/SSML.cmd index 778c0263a..47ed3a987 100755 --- a/alexarc4shng/cmd/SSML.cmd +++ b/alexarc4shng/cmd/SSML.cmd @@ -1,3 +1,3 @@ -apiurl|/api/behaviors/preview -description|Use SSML to speak-Example: -json|{"behaviorId": "PREVIEW", "sequenceJson": {"@type": "com.amazon.alexa.behaviors.model.Sequence", "startNode": {"operationPayload": {"customerId": "", "content": [{"display": {"title": "smartHomeNG", "body": ""}, "speak": {"type": "ssml", "value": ""}, "locale": "de-DE"}], "expireAfter": "PT5S", "target": {"customerId": "", "devices": [{"deviceSerialNumber": "", "deviceTypeId": ""}]}}, "type": "AlexaAnnouncement", "@type": "com.amazon.alexa.behaviors.model.OpaquePayloadOperationNode"}}, "status": "ENABLED"} +apiurl|/api/behaviors/preview +description|Use SSML to speak-Example: +json|{"behaviorId": "PREVIEW", "sequenceJson": {"@type": "com.amazon.alexa.behaviors.model.Sequence", "startNode": {"operationPayload": {"customerId": "", "content": [{"display": {"title": "smartHomeNG", "body": ""}, "speak": {"type": "ssml", "value": ""}, "locale": "de-DE"}], "expireAfter": "PT10S", "target": {"customerId": "", "devices": [{"deviceSerialNumber": "", "deviceTypeId": ""}]}}, "type": "AlexaAnnouncement", "@type": "com.amazon.alexa.behaviors.model.OpaquePayloadOperationNode"}}, "status": "ENABLED"} diff --git a/alexarc4shng/cmd/StartRoutine.cmd b/alexarc4shng/cmd/StartRoutine.cmd new file mode 100755 index 000000000..b91311b2c --- /dev/null +++ b/alexarc4shng/cmd/StartRoutine.cmd @@ -0,0 +1,3 @@ +apiurl|/api/behaviors/preview +description|Start a routine +json|{"behaviorId": "PREVIEW", "status": "ENABLED", 
"sequenceJson": {"@type": "com.amazon.alexa.behaviors.model.Sequence", "sequenceId": "amzn1.alexa.sequence.8d9b40ab-91a7-46c1-8d42-1cd53408874f", "startNode": {"@type": "com.amazon.alexa.behaviors.model.SerialNode", "name": null, "nodesToExecute": [{"@type": "com.amazon.alexa.behaviors.model.OpaquePayloadOperationNode", "nodeState": null, "name": null, "type": "Alexa.DeviceControls.Volume", "skillId": "amzn1.ask.1p.alexadevicecontrols", "operationPayload": {"customerId": "", "deviceType": "", "deviceSerialNumber": "", "value": 20, "locale": "de-DE"}, "presentationDataList": null, "clientData": null, "context": null, "tag": null}, {"@type": "com.amazon.alexa.behaviors.model.OpaquePayloadOperationNode", "nodeState": null, "name": null, "type": "Alexa.Sound", "skillId": "amzn1.ask.1p.sound", "operationPayload": {"customerId": "", "deviceType": "", "deviceSerialNumber": "", "soundStringId": "amzn_sfx_doorbell_chime_02", "locale": "de-DE"}, "presentationDataList": null, "clientData": null, "context": null, "tag": null}, {"@type": "com.amazon.alexa.behaviors.model.OpaquePayloadOperationNode", "nodeState": null, "name": null, "type": "Alexa.Sound", "skillId": "amzn1.ask.1p.sound", "operationPayload": {"customerId": "A145HAI7HRJWX3", "deviceType": "A1Z88NGR2BK6A2", "deviceSerialNumber": "G091830895260311", "soundStringId": "amzn_sfx_doorbell_chime_02", "locale": "de-DE"}, "presentationDataList": null, "clientData": null, "context": null, "tag": null}, {"@type": "com.amazon.alexa.behaviors.model.OpaquePayloadOperationNode", "nodeState": null, "name": null, "type": "Alexa.DeviceControls.Volume", "skillId": "amzn1.ask.1p.alexadevicecontrols", "operationPayload": {"customerId": "", "deviceType": "", "deviceSerialNumber": "", "value": 20, "locale": "de-DE"}, "presentationDataList": null, "clientData": null, "context": null, "tag": null}, {"@type": "com.amazon.alexa.behaviors.model.OpaquePayloadOperationNode", "nodeState": null, "name": null, "type": 
"Alexa.DeviceControls.Volume", "skillId": "amzn1.ask.1p.alexadevicecontrols", "operationPayload": {"customerId": "", "deviceType": "", "deviceSerialNumber": "", "value": 20, "locale": "de-DE"}, "presentationDataList": null, "clientData": null, "context": null, "tag": null}]}}} diff --git a/alexarc4shng/cmd/StartTuneInStation.cmd b/alexarc4shng/cmd/StartTuneInStation.cmd index 1f2966ab8..0937d3740 100755 --- a/alexarc4shng/cmd/StartTuneInStation.cmd +++ b/alexarc4shng/cmd/StartTuneInStation.cmd @@ -1,3 +1,3 @@ -apiurl|/api/tunein/queue-and-play?deviceSerialNumber=&deviceType=&guideId=&contentType=station&callSign=&mediaOwnerCustomerId= -description|Startet einen TuneIn Radio-Kanel -json|{} +apiurl|/api/tunein/queue-and-play?deviceSerialNumber=&deviceType=&guideId=&contentType=station&callSign=&mediaOwnerCustomerId= +description|Startet einen TuneIn Radio-Kanal +json|{} diff --git a/alexarc4shng/cmd/TuneInNew.cmd b/alexarc4shng/cmd/TuneInNew.cmd new file mode 100755 index 000000000..dc87e9937 --- /dev/null +++ b/alexarc4shng/cmd/TuneInNew.cmd @@ -0,0 +1,3 @@ +apiurl|/api/entertainment/v1/player/queue?deviceSerialNumber=&deviceType= +description|NEW API for TuneIn +json|{"contentToken": "music:V3lKdGRYTnBZeTkwZFc1bFNXNHZjM1JoZEdsdmJrbGtJaXdpY3prMk1UUXhJbDE4ZXlKd2NtVjJhVzkxYzFCaFoyVkpaQ0k2SWxSMWJtVkpibDlUUlVGU1EwZ2lmUT09"} diff --git a/alexarc4shng/locale.yaml b/alexarc4shng/locale.yaml index 9bff4b0db..7b96d043b 100755 --- a/alexarc4shng/locale.yaml +++ b/alexarc4shng/locale.yaml @@ -1,48 +1,39 @@ plugin_translations: # Translations for the plugin specially for the web interface - 'allowed IP': {'de': 'erlaubte IP', 'en': '=', 'fr': 'Adresses IP aprouvées'} - 'last Session': {'de': 'letzte Sitzung', 'en': '=', 'fr': 'Dernière session'} - 'Stream-Modifiers': {'de': 'Stream-Modikatoren', 'en': '=', 'fr': '='} - 'last Session duration': {'de': 'letzte Sitzungs- dauer', 'en': '=', 'fr': '='} - 'Sessions total': {'de': 'Sitzungen gesamt', 'en': '=', 'fr': '='} - 
'Settings': {'de': 'Einstellungen', 'en': '=', 'fr': '='} - 'Credentials:': {'de': 'Zugangsdaten:', 'en': '=', 'fr': '='} - 'delete Protocol': {'de': 'Protokoll löschen:', 'en': '=', 'fr': '='} - 'Real-URL': {'de': 'tatsächliche URL', 'en': '=', 'fr': '='} - 'Commit Changes': {'de': 'Änderungen speichern', 'en': '=', 'fr': '='} - 'Store to Config': {'de': 'in Konfiguration speichern', 'en': '=', 'fr': '='} - 'Settings / Cam-Info': {'de': 'Einstellungen / Kamera-Infos', 'en': '=', 'fr': '='} - 'Communication-Log': {'de': 'Kommunikations-Log', 'en': '=', 'fr': '='} - 'active Camera Threads': {'de': 'aktive Kamera-Threads', 'en': '=', 'fr': '='} - 'SSL Certificate Info': {'de': 'SSL Zertifikas Info', 'en': '=', 'fr': '='} - 'Proxy-Credentials': {'de': 'Proxy-Zugangsdaten', 'en': '=', 'fr': '='} - 'Proxy-Authorization': {'de': 'Proxy-Authorisierungs-Typ', 'en': '=', 'fr': '='} - 'Video-Buffer-Size :': {'de': 'Video-Puffer-Grösse', 'en': '=', 'fr': '='} - 'Authorization :': {'de': 'Authorisierungs-Typ', 'en': '=', 'fr': '='} - 'Encode': {'de': 'enkodieren', 'en': '=', 'fr': 'Encoder'} - 'encoded Cred.:': {'de': 'enkodierte Zugangsdaten', 'en': '=', 'fr': 'Cred. encodés'} - 'Result :': {'de': 'Ergebnis', 'en': '=', 'fr': 'Résultat'} - 'Value': {'de': 'Wert', 'en': '=', 'fr': 'Valeur'} - 'Property': {'de': 'Eigenschaft', 'en': '=', 'fr': 'Prioriété'} - 'Threads existing ...': {'de': 'existierende Threads', 'en': '=', 'fr': '='} - 'Auto Update ( 2 sec.)': {'de': 'Auto Update ( 2 Sek.)', 'en': '=', 'fr': '='} - 'last/next Auto-Login' : {'de': 'letztes/nächstes Auto-Login', 'en': '=', 'fr': '='} - 'selected Device' : {'de': 'gwähltes Gerät', 'en': '=', 'fr': '='} - 'No. 
of Alexa-Devices': {'de': 'Anzahl Alexa-Geräte', 'en': '=', 'fr': '='} - 'LogOff': {'de': 'Ausloggen', 'en': '=', 'fr': 'Déconnection'} - 'LogIn': {'de': 'Einloggen', 'en': '=', 'fr': 'Connection'} - 'Store Cookie': {'de': 'Cookie speichern', 'en': '=', 'fr': 'Sauvegarder Cookie'} - 'Paste the Cookie-File here': {'de': 'Cookie File hier einfügen', 'en': '=', 'fr': 'Coller le fichier cookie ici'} - 'existing Commands': {'de': 'existierende Kommandos', 'en': '=', 'fr': '='} - 'Command-Name': {'de': 'Kommando-Name', 'en': '=', 'fr': '='} - - -# '': {'de': 'Proxy-Authorisierungs-Typ', 'en': '=', 'fr': ''} - - - - - - - - + 'allowed IP' : {'de': 'erlaubte IP', 'en': '=', 'fr': 'Adresses IP aprouvées'} + 'last Session' : {'de': 'letzte Sitzung', 'en': '=', 'fr': 'Dernière session'} + 'Stream-Modifiers' : {'de': 'Stream-Modikatoren', 'en': '=', 'fr': 'Modificateurs de flux'} + 'last Session duration' : {'de': 'letzte Sitzungs- dauer', 'en': '=', 'fr': 'durèe de la dernière session'} + 'Sessions total' : {'de': 'Sitzungen gesamt', 'en': '=', 'fr': 'Nombre de sessions'} + 'Settings' : {'de': 'Einstellungen', 'en': '=', 'fr': 'Réglages'} + 'Credentials:' : {'de': 'Zugangsdaten:', 'en': '=', 'fr': "Données d'accès"} + 'delete Protocol' : {'de': 'Protokoll löschen:', 'en': '=', 'fr': 'Supprimer journal'} + 'Real-URL' : {'de' : 'tatsächliche URL', 'en': '=', 'fr': 'URL réelle'} + 'Commit Changes' : {'de': 'Änderungen speichern', 'en': '=', 'fr': 'Sauvegarder modifications'} + 'Store to Config' : {'de': 'in Konfiguration speichern', 'en': '=', 'fr': 'Sauvegarder la config'} + 'Settings / Cam-Info' : {'de': 'Einstellungen / Kamera-Infos', 'en': '=', 'fr': 'Règlages / Infos caméra'} + 'Communication-Log' : {'de': 'Kommunikations-Log', 'en': '=', 'fr': 'Journal de communication'} + 'active Camera Threads' : {'de': 'aktive Kamera-Threads', 'en': '=', 'fr': 'Threads de caméra actifs'} + 'SSL Certificate Info' : {'de': 'SSL Zertifikas Info', 'en': '=', 'fr': 'Infos sur le certificat 
SSL'} + 'Proxy-Credentials' : {'de': 'Proxy-Zugangsdaten', 'en': '=', 'fr': "Données d'accès du proxy"} + 'Proxy-Authorization' : {'de': 'Proxy-Authorisierungs-Typ', 'en': '=', 'fr': "Type d'autentification du proxy"} + 'Video-Buffer-Size:' : {'de': 'Video-Puffer-Grösse', 'en': '=', 'fr': 'Taille de la mémoire tampon vidéo'} + 'Authorization :' : {'de': 'Authorisierungs-Typ', 'en': '=', 'fr': "Type d'autentification"} + 'Encode, save and login' : {'de': 'enkodieren,speichern und einloggen', 'en': '=', 'fr': 'Encoder, sauvegarder et connexion'} + 'encoded Cred.:' : {'de': 'enkodierte Zugangsdaten', 'en': '=', 'fr': 'Cred. encodés'} + 'Result :' : {'de': 'Ergebnis', 'en': '=', 'fr': 'Résultat'} + 'Value' : {'de': 'Wert', 'en': '=', 'fr': 'Valeur'} + 'Property' : {'de': 'Eigenschaft', 'en': '=', 'fr': 'Prioriété'} + 'Threads existing ...' : {'de': 'existierende Threads', 'en': '=', 'fr': 'Threads existants'} + 'Auto Update ( 2 sec.)' : {'de': 'Auto Update ( 2 Sek.)', 'en': '=', 'fr': 'Màj automatique ( 2 sec. )'} + 'last/next Auto-Login' : {'de': 'letztes/nächstes Auto-Login', 'en': '=', 'fr': 'Dernière / prochaine connexion'} + 'selected Device' : {'de': 'gewähltes Gerät', 'en': '=', 'fr': 'Appareil choisi'} + 'No. 
of Alexa-Devices' : {'de': 'Anzahl Alexa-Geräte', 'en': '=', 'fr': "Nombre d'appareils Alexa"} + 'LogOff' : {'de': 'Ausloggen', 'en': '=', 'fr': 'Déconnection'} + 'LogIn' : {'de': 'Einloggen', 'en': '=', 'fr': 'Connection'} + 'Store Cookie' : {'de': 'Cookie speichern', 'en': '=', 'fr': 'Sauvegarder Cookie'} + 'Paste the Cookie-File here': {'de': 'Cookie File hier einfügen', 'en': '=', 'fr': 'Coller le fichier cookie ici'} + 'existing Commands' : {'de': 'existierende Kommandos', 'en': '=', 'fr': 'Commandes existantes'} + 'Command-Name' : {'de': 'Kommando-Name', 'en': '=', 'fr': 'Nom de la commande'} + 'Step' : {'de': 'Schritt', 'en': '=', 'fr': 'Étape'} + 'Reload Page' : {'de': 'Seite neu laden', 'en': '=', 'fr': 'Recharger page'} diff --git a/alexarc4shng/plugin.yaml b/alexarc4shng/plugin.yaml index 2cab5a9a1..447b7ac04 100755 --- a/alexarc4shng/plugin.yaml +++ b/alexarc4shng/plugin.yaml @@ -8,7 +8,7 @@ plugin: maintainer: AndreK tester: henfri, juergen, psilo #documentation: https://www.smarthomeng.de/user/plugins/alexarc4shng/user_doc.html # url of documentation - version: 1.0.2 # Plugin version + version: 1.0.3 # Plugin version sh_minversion: 1.5.2 # minimum shNG version to use this plugin multi_instance: False # plugin supports multi instance classname: AlexaRc4shNG # class containing the plugin @@ -53,6 +53,13 @@ parameters: de: 'Sekunden bis zum automatischen refreshen des Cookie-files' en: 'seconds till the next automatic login to get a new cookie' + mfa_secret: + type: str + default: '' + description: + de: 'Das OTP MFA Secret welches auf der Amazon-Login-Seite angegben wird' + en: 'The OTP MFA Secret from the Amazon Login-Page' + logic_parameters: NONE # No logic parameters for this plugin item_structs: NONE # no item structure needed item_attributes: NONE # no item attributes needed @@ -92,6 +99,23 @@ plugin_functions: de: "Wert, der gesendet werden soll, numerische Werte ohne Hochkomma als Zahl" en: "Value to send, numeric Values without Quotes as Num" + 
get_list: + type: str + description: + de: "Liefert die Alexa-Shopping-Liste als dict-zurück" + en: "gives back a dict with the Alexa-Shopping-List" + parameters: + type: + type: str + description: + de: "Art der Liste 'SHOPPING_LIST' oder 'TO_DO'" + en: "type of the List 'SHOPPING_LIST' or 'TO_DO'" + valid_list: + - 'SHOPPING_LIST' + - 'TO_DO' + + + get_last_alexa: type: str description: diff --git a/alexarc4shng/requirements.txt b/alexarc4shng/requirements.txt index f2293605c..503fd8f90 100755 --- a/alexarc4shng/requirements.txt +++ b/alexarc4shng/requirements.txt @@ -1 +1,2 @@ requests +pyotp >= 2.6.0 diff --git a/alexarc4shng/user_doc.rst b/alexarc4shng/user_doc.rst index c44c62694..73f3f6089 100755 --- a/alexarc4shng/user_doc.rst +++ b/alexarc4shng/user_doc.rst @@ -43,7 +43,7 @@ Aufruf des Webinterfaces Das Plugin kann aus dem backend aufgerufen werden. Dazu auf der Seite Plugins in der entsprechenden Zeile das Icon in der Spalte **Web Interface** anklicken. -Außerdem kann das Webinterface direkt über ``http://smarthome.local:8383/alexarc4shng`` aufgerufen werden. +Außerdem kann das Webinterface direkt über ``http://smarthome.local:8383/plugins/alexarc4shng`` aufgerufen werden. 
Beispiele diff --git a/alexarc4shng/webif/static/img/plugin_logo.png b/alexarc4shng/webif/static/img/plugin_logo.png index 43cf3f7d4..75a6edfa7 100755 Binary files a/alexarc4shng/webif/static/img/plugin_logo.png and b/alexarc4shng/webif/static/img/plugin_logo.png differ diff --git a/alexarc4shng/webif/static/js/handler.js b/alexarc4shng/webif/static/js/handler.js index 1734601ab..00fe38070 100755 --- a/alexarc4shng/webif/static/js/handler.js +++ b/alexarc4shng/webif/static/js/handler.js @@ -1,5 +1,312 @@ var selectedDevice; + +//*************************************************** +//Function to store manual inserted cookie - File +//*************************************************** +function BtnStoreCookie() +{ + data = myCodeMirrorConf.getValue() + data=JSON.stringify(data) + data = data.split('\"').join("") + $.ajax({ + url: "storecookie.html", + type: "GET", + data: { cookie_txt : data }, //data, + contentType: "application/json; charset=utf-8", + success: function (response) { + myResult=JSON.parse(response) + if (myResult.data.Result == true) + { + document.getElementById('reloadPage').style.visibility='visible' + document.getElementById('txt_Result').textContent='You did it\nLogin was successfull\nCookie was stored\nPlease reload Page' + document.getElementById('txt_Result').style.backgroundColor="Lightgreen" + document.getElementById('txt_Result').style.color='black' + } + else + { + document.getElementById('txt_Result').textContent='Sorry, login was not successfull\nCookie was not stored\nPlease try again' + document.getElementById('txt_Result').style.backgroundColor="Red" + document.getElementById('txt_Result').style.color='black' + } + }, + error: function (xhr, status, error) { + document.getElementById("txt_Result").innerHTML = "Error while Communication !"; + $("#reload-element").removeClass("fa-spin"); + $("#MFAcardOverlay").hide(); + } + }); +} + + +//*************************************************** +// Function to communicate with the plugin 
himself +//*************************************************** +function PublicAjax(url, data) +{ + //data = unescape(encodeURIComponent(JSON.stringify(data))) + data=JSON.stringify(data) + $.ajax({ + url: url + ".html", + type: "GET", + data: { data : data }, //data, + contentType: "application/json; charset=utf-8", + success: function (response) { + ValidateMFAResponse(response); + }, + error: function (xhr, status, error) { + document.getElementById("Tooltip").innerHTML= "
Error

Error while Communication !
" + document.getElementById("Tooltip").style.backgroundColor="red" + + + $("#reload-element").removeClass("fa-spin"); + $("#MFAcardOverlay").hide(); + } + }); +} + +//*************************************************** +// Validate the response from Step by Step MFA-Setup +//*************************************************** +function ValidateMFAResponse(response) +{ + myData = JSON.parse(response) + if (myData.Status == 'OK') + { + document.getElementById("Tooltip").style.backgroundColor="#d4edda" + myStep = parseInt(myData.Step.substr(4,1)) + document.getElementById("Status_" + String(+myStep)).classList.add("fa-check-circle") + document.getElementById("Status_" + String(+myStep)).classList.remove("fa-exclamation-triangle") + document.getElementById("Status_" + String(+myStep)).style.color = "green" + switch (myStep) + { + case 1: + { + myToolTip = "
" + myToolTip += "open Amazon-Site and create a new APP

" + myToolTip += "- Amazon-Web-Site will be opened automatically by pressing the button
" + myToolTip += "- Create a new APP
" + myToolTip += '- Select "barcode could not be read" and copy the shown MFA-Secret to to clipboard.
' + myToolTip += "
Press the button to continue" + myToolTip += "
" + document.getElementById("Tooltip").innerHTML= myToolTip + break; + } + case 2: + { + myToolTip = "
" + myToolTip += "Insert the MFA-Secret
" + myToolTip += "- Insert the copied MFA-Secret to the AlexaRc4shNG-Web-Interface
" + myToolTip += '- After you have inserted the copied MFA-Secret, by pressing the button „Code berechnen“ the OTP-Code will be calculated by the plugin.
' + myToolTip += "
Press the button to continue" + myToolTip += "
" + document.getElementById("Tooltip").innerHTML= myToolTip + break; + } + case 3: + { + myToolTip = "
" + myToolTip += "- The calculated Code will be shown on the Web-Interface and automatically copied to the Clipboard.
" + myToolTip += "- Please insert the OTP-code to the amazon-site, when the OTP is accepted please confirm.
" + myToolTip += " (you need two tries to insert it from clipboard, on first try the amazon-Website would not accept the code from clipboard)
" + myToolTip += "
Press the button to continue" + myToolTip += "
" + document.getElementById("Tooltip").innerHTML= myToolTip + document.getElementById("txtOTP").value= myData.data.OTPCode + copyToClipboard(myData.data.OTPCode); + break; + } + case 4: + { + myToolTip = "
" + myToolTip += "Store the MFA-Code to your ./etc/plugin.yaml
" + myToolTip += "
Press the button to continue" + myToolTip += "
" + document.getElementById("Tooltip").innerHTML= myToolTip + break; + } + case 5: + { + myToolTip = "
" + myToolTip += "try to login with MFA
" + myToolTip += "
Press the button to continue" + myToolTip += "
" + document.getElementById("Tooltip").innerHTML= myToolTip + break; + } + case 6: + { + document.getElementById("Status_" + String(+myStep+1)).classList.add("fa-check-circle") + document.getElementById("Status_" + String(+myStep+1)).classList.remove("fa-exclamation-triangle") + document.getElementById("Status_" + String(+myStep+1)).style.color = "green" + document.getElementById("Status_" + String(+myStep+1)+"_1").classList.add("fa-check-circle") + document.getElementById("Status_" + String(+myStep+1)+"_1").classList.remove("fa-exclamation-triangle") + document.getElementById("Status_" + String(+myStep+1)+"_1").style.color = "green" + + myToolTip = "
" + myToolTip += "Successfully done
" + myToolTip += "
Press the reload button to continue" + myToolTip += "
" + document.getElementById("Tooltip").innerHTML= myToolTip + document.getElementById('goal').innerHTML="congratulations
You did it !" + document.getElementById('img_goal').src="static/img/alexa_cookie_good.png" + document.getElementsByTagName("img")[0].src="static/img/alexa_cookie_good.png" + document.getElementById('btnMfaReset').style.visibility="hidden" + document.getElementById('btnMfaReload').style.visibility="visible" + break; + } + } + myStep += 1 + try + { document.getElementById("Line_" + String(+myStep)).style.visibility = "visible" } + catch (e) + {} + + } + else + { + myStep = parseInt(myData.Step.substr(4,1)) + document.getElementById("Status_" + String(+myStep)).classList.remove("fa-check-circle") + document.getElementById("Status_" + String(+myStep)).classList.add("fa-exclamation-triangle") + document.getElementById("Status_" + String(+myStep)).style.color = "red" + switch (myStep) + { + case 3: + { + myToolTip = "
" + myToolTip += "Error
" + myToolTip += "
"+myData.data.Message + myToolTip += "
Please reload page" + myToolTip += "
" + document.getElementById("Tooltip").innerHTML= myToolTip + document.getElementById("Tooltip").style.backgroundColor="red" + break; + } + case 5: + { + myToolTip = "
" + myToolTip += "Error
" + myToolTip += "
"+myData.data.Message + myToolTip += "
Please reload page" + myToolTip += "
" + document.getElementById("Tooltip").innerHTML= myToolTip + document.getElementById("Tooltip").style.backgroundColor="red" + break; + } + case 6: + { + myStep += 1 + document.getElementById("Status_" + String(+myStep)+"_1").classList.remove("fa-check-circle") + document.getElementById("Status_" + String(+myStep)+"_1").classList.add("fa-exclamation-triangle") + document.getElementById("Status_" + String(+myStep)+"_1").style.color = "red" + document.getElementById('img_goal').src="static/img/alexa_cookie_bad.png" + document.getElementsByTagName("img")[0].src="static/img/alexa_cookie_bad.png" + document.getElementById('goal').innerHTML="Sorry, login was not successfull" + document.getElementById('btnMfaReset').style.visibility="visible" + document.getElementById('btnMfaReload').style.visibility="hidden" + myToolTip = "
" + myToolTip += "Please try again
" + myToolTip += "
Press the reset button to continue" + myToolTip += "
" + document.getElementById("Tooltip").innerHTML= myToolTip + document.getElementById("Line_" + String(+myStep)).style.visibility = "visible" + } + } + } + console.log(response) + $("#reload-element").removeClass("fa-spin") + $("#MFAcardOverlay").hide(); +} + +//******************************************* +// MFA-Login Reset +//******************************************* +function mfaReset() +{ + for (var i = 2; i <= 7; i++) + { + document.getElementById("Line_" + String(+i)).style.visibility = "hidden" + document.getElementById("Status_" + String(i-1)).classList.remove("fa-check-circle") + document.getElementById("Status_" + String(i-1)).classList.remove("fa-exclamation-triangle") + } + document.getElementById('btnMfaReset').style.visibility="hidden" + myToolTip = "
" + myToolTip += "Enter Credentials
" + myToolTip += "Enter your credentials for the alexa.amazon-Website
Press the button to continue" + myToolTip += "
" + document.getElementById("Tooltip").innerHTML= myToolTip + document.getElementById("txtMFAUser").value = "" + document.getElementById("txtMFAPwd").value = "" + document.getElementById("txtMFA").value = "" + + +} +//******************************************* +// Step by Step-Handler MFA-Login +//******************************************* +function BtnHandleMFA(step) +{ + //$("#MFAcardOverlay").addClass("fa-spin"); + $("#MFAcardOverlay").show(); + $("#reload-element").addClass("fa-spin") + data = {} + switch(step) + { + case 1: + { + myUser = document.getElementById("txtMFAUser").value + myPwd = document.getElementById("txtMFAPwd").value + data["Key"] ="Step"+String(step); + data["data"]={User: myUser, Pwd:myPwd}; + PublicAjax('handle_mfa', data, step) + break; + } + case 2: + { + data["data"]={} + data["Step"] = "Step2" + data["Status"]="OK"; + myChildWindows = window.open('https://www.amazon.de/a/settings/approval', '_blank', 'location=yes,scrollbars=yes,status=yes'); + ValidateMFAResponse(JSON.stringify(data)); + break; + } + case 3: + { + data["Key"] ="Step"+String(step); + myMFA = document.getElementById("txtMFA").value; + data["data"]={MFA: myMFA} + PublicAjax('handle_mfa', data, step) + break + } + case 4: + { + data["data"]={} + data["Step"] = "Step4" + data["Status"]="OK"; + ValidateMFAResponse(JSON.stringify(data)); + break + } + case 5: + { + myUser = document.getElementById("txtMFAUser").value + myPwd = document.getElementById("txtMFAPwd").value + myMFA = document.getElementById("txtMFA").value; + data["Key"] ="Step"+String(step); + data["data"]={User: myUser, Pwd:myPwd,MFA: myMFA }; + PublicAjax('handle_mfa', data, step) + break; + } + case 6: + { + data["Key"] ="Step"+String(step); + data["data"]={command: 'login' }; + PublicAjax('handle_mfa', data, step) + break; + } + } +} + + //******************************************* // Button Handler for Encoding credentials //******************************************* @@ -8,17 +315,19 @@ function 
BtnEncode(result) { user = document.getElementById("txtUser").value; pwd = document.getElementById("txtPwd").value; - store2config = document.getElementById("store_2_config").checked; + mfa = "" + store2config = true encoded=user+":"+pwd; encoded=btoa(encoded); - //document.getElementById("txtEncoded").value = encoded; $.ajax({ url: "store_credentials.html", type: "GET", data: { encoded : encoded, user : user, pwd : pwd, - store_2_config : store2config + store_2_config : store2config, + mfa : mfa, + login : true }, contentType: "application/json; charset=utf-8", success: function (response) { @@ -94,7 +403,7 @@ function BtnSave(result) document.getElementById("txtButton").value ="BtnSave"; - myPayload = myCodeMirrorConf.getValue(); + myPayload = myCodeMirrorPayload.getValue(); StoreCMD ( document.getElementById("txtValue").value, @@ -118,12 +427,12 @@ function BtnCheck(result) try { // Block of code to try myValue = document.getElementById("txtValue").value - myPayload = myCodeMirrorConf.getValue(); + myPayload = myCodeMirrorPayload.getValue(); myPayload = myPayload.replace("",myValue); var myTest = JSON.stringify(JSON.parse(myPayload),null,2) - myCodeMirrorConf.setValue(myTest); - myCodeMirrorConf.focus; - myCodeMirrorConf.setCursor(myCodeMirrorConf.lineCount(),0); + myCodeMirrorPayload.setValue(myTest); + myCodeMirrorPayload.focus; + myCodeMirrorPayload.setCursor(myCodeMirrorPayload.lineCount(),0); document.getElementById("txtresult").value = "JSON-Structure is OK"; document.getElementById("resultOK").style.visibility="visible"; document.getElementById("resultNOK").style.visibility="hidden"; @@ -158,7 +467,7 @@ function BtnTest(result) } document.getElementById("txtButton").value ="BtnTest"; - myPayload = myCodeMirrorConf.getValue(); + myPayload = myCodeMirrorPayload.getValue(); TestCMD ( @@ -177,8 +486,6 @@ function BtnTest(result) function BtnDelete(result) { - buildCmdSequence(); - return var filetodelete = document.getElementById("txtCmdName").value; if 
(filetodelete == "") { alert ("No Command selected to delete, first select one"); @@ -420,7 +727,6 @@ function build_cmd_list(result) function reloadCmds() { - $("#refresh-element").addClass("fa-spin"); $("#reload-element").addClass("fa-spin"); $("#cardOverlay").show(); $.getJSON("build_cmd_list_html", function(result) @@ -497,9 +803,9 @@ function ShowCommand(response,txtCmdName) myjson = objResponse[0][x].split("'").join("\""); myjson = myjson.split("\\").join(""); var myTest = JSON.stringify(JSON.parse(myjson),null,2) - myCodeMirrorConf.setValue(myTest); - myCodeMirrorConf.focus; - myCodeMirrorConf.setCursor(myCodeMirrorConf.lineCount(),0); + myCodeMirrorPayload.setValue(myTest); + myCodeMirrorPayload.focus; + myCodeMirrorPayload.setCursor(myCodeMirrorPayload.lineCount(),0); } } document.getElementById("txtresult").value = myResult; @@ -517,3 +823,47 @@ function ShowCommand(response,txtCmdName) } +//************************************************************************ +//copyToClipboard - copies the finalized Widget to the Clipboard +//************************************************************************ +const copyToClipboard = str => { + const el = document.createElement('textarea'); // Create a + + -
-
+ - + - - - + +
-
+
- - - - Result : - - - -
@@ -143,6 +140,188 @@ {% endblock bodytab1 %} {% block bodytab2 %} +
+
+ +
+ +
+ + + +{% if pyOTP == true %} + +{% endif %} + + + +
+
+
+ +
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ {{ _('Schritt') }} 1 : + {{ _('Credentials:') }} + + + + + + + +
+
+
+ + +{% endblock bodytab2 %} + + + +{% block bodytab3 %}
@@ -187,12 +366,9 @@ }); +{% endblock bodytab3 %} - - -{% endblock bodytab2 %} - -{% block bodytab3 %} +{% block bodytab4 %} -{% endblock pluginstyles %} - -{% block pluginscripts %} - - -{% endblock pluginscripts %} - -{% set tabcount = 6 %} - -{% set tab1title = _(""'AVM Items'" (" ~ avm_item_count ~ ") ") %} -{% set tab2title = _(""'AVM Smarthome Items'" (" ~ smarthome_item_count ~ ") ") %} -{% set tab3title = _(""'Plugin-API'"") %} -{% set tab4title = _(""'Log-Einträge'"") %} -{% set tab5title = _(""'Call Monitor Items'" (" ~ call_monitor_item_count ~ ") ") %} -{% set tab6title = _(""'AVM Smarthome Devices'"") %} - -{% set language = p.get_sh().get_defaultlanguage() %} -{% if language not in ['en','de'] %} - {% set language = 'en' %} -{% endif %} - -{% block headtable %} -
- - - - - - - - - - - - - - - - - - - - -
- {% if p.get_fritz_device().is_available() %} - {{ _('Gerät verfügbar') }} - {% else %} - {{ _('Gerät nicht verfügbar') }} - {% endif %} - {{ _('Verbunden') }} - - {% if p.get_fritz_device().is_available() %} - {{ _('Ja') }}{% if p._fritz_device.is_ssl() %}, SSL{% endif %} - {% else %} - {{ _('Nein') }} - {% endif %} - {{ _('Benutzer') }}{{ p.get_parameter_value_for_display('username') }}
- {% if p._call_monitor %} - {% if p.get_monitoring_service()._listen_active %} - {{ _('Call Monitor verbunden') }} - {% else %} - {{ _('Call Monitor nicht verbunden') }} - {% endif %} - {% endif %} - {{ _('Call Monitor') }} - {% if p._call_monitor %}{{ _('Ja') }}{% if not p.get_monitoring_service()._listen_active %}, {{ _('nicht verbunden') }}{% endif %}{% else %}{{ _('Nein') }}{% endif %}{{ _('Passwort') }}{{ p.get_parameter_value_for_display('password') }}
{{ _('Host') }}{{ p._fritz_device.get_host() }}{{ _('Port') }}{{ p._fritz_device.get_port() }} {% if p._fritz_device.is_ssl() %}(HTTPS){% endif %}
-{% endblock %} - - -{% block buttons %} - - -{% endblock buttons %} - - -{% block bodytab1 %} -
-
- -
-
- - - - - - - - - - - - - - {% for item in avm_items %} - {% set item_id = item.id() %} - {% if p.get_instance_name() %} - {% set instance_key = "avm_data_type@"+p.get_instance_name() %} - {% else %} - {% set instance_key = "avm_data_type" %} - {% endif %} - - - - - - - - - - {% endfor %} - -
{{ _('Pfad') }}{{ _('Typ') }}{{ _('AVM Datentyp') }}{{ _('Wert') }}{{ _('Letztes Update') }}{{ _('Letzter Change') }}
{{ item_id }}{{ item.property.type }}{{ item.conf[instance_key] }}{{ _('.') }}{{ item() }}{{ _('.') }}{{ item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') }}{{ _('.') }}{{ item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') }}
-
-
-{% endblock %} - - -{% block bodytab2 %} -
-
- -
-
- - - - - - - - - - - - - {% for item in smarthome_items %} - {% set item_id = item.id() %} - {% if p.get_instance_name() %} - {% set instance_key = "avm_data_type@"+p.get_instance_name() %} - {% else %} - {% set instance_key = "avm_data_type" %} - {% endif %} - - - - - - - - - {% endfor %} - -
{{ _('Pfad') }}{{ _('Typ') }}{{ _('AVM Datentyp') }}{{ _('Wert') }}{{ _('Letztes Update') }}{{ _('Letzter Change') }}
{{ item_id }}{{ item.property.type }}{{ item.conf[instance_key] }}{{ _('.') }}{{ item() }}{{ _('.') }}{{ item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') }}{{ _('.') }}{{ item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') }}
-
-
-{% endblock %} - - -{% block bodytab3 %} -
-
- {% for function, dict in p.metadata.plugin_functions.items() %} -
-
- {{ dict['type'] }} {{ function }}({% if dict['parameters'] is not none %}{% for name, paramdict in dict['parameters'].items() %}{% if loop.index > 1 %}, {% endif %}{{ name }}: {{ paramdict['type'] }}{% endfor %}{% endif %}) -
-
- {{ dict['description'][language] }}
- {% if dict['parameters'] is not none %} -
-
- {{ _('Parameter') }}: -
-
-
    - {% for name, paramdict in dict['parameters'].items() %} -
  • - {{ name }}: {{ paramdict['type'] }}
    - {{ paramdict['description'][language] }} -
  • - {% endfor %} -
-
-
- {% endif %} -
-
- {% endfor %} -
-
-{% endblock %} - - -{% block bodytab4 %} -{% set logentries = p.get_device_log_from_lua() %} -
-
- - - - - - - - - - - - {% if logentries %} - {% for logentry in logentries %} - - - - - - - - {% endfor %} - {% endif %} - -
{{ _('Datum')}}{{ _('Uhrzeit')}}{{ _('Meldung')}}{{ _('Nachrichtennummer')}}{{ _('Kategorie')}}
{{ logentry[4] }}{{ logentry[5] }}{{ logentry[0] }} - - {{ logentry[1] }} - - {{ _('cat_'+logentry[2]|string) }}
-
-
-{% endblock %} - - -{% block bodytab5 %} -{% if p._call_monitor %} -
-
- -
-
- - - - - - - - - - - - - {% for item in call_monitor_items %} - {% set item_id = item.id() %} - {% if p.get_instance_name() %} - {% set instance_key = "avm_data_type@"+p.get_instance_name() %} - {% else %} - {% set instance_key = "avm_data_type" %} - {% endif %} - - - - - - - - - {% endfor %} - -
{{ _('Pfad') }}{{ _('Typ') }}{{ _('AVM Datentyp') }}{{ _('Wert') }}{{ _('Letztes Update') }}{{ _('Letzter Change') }}
{{ item_id }}{{ item.property.type }}{{ item.conf[instance_key] }}{{ item() }}{{ item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') }}{{ item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') }}
-
-
-{% endif %} -{% endblock %} - -{% block bodytab6 %} -{% if p._fritz_device._smarthome_devices %} -
-
- -
-
- - - - - - - - - - {% for ain in p._fritz_device._smarthome_devices %} - - - - - - {% endfor %} - -
{{ _('No') }}{{ _('Device ain') }}{{ _('Device Details (dict)') }}
{{ loop.index }}{{ ain }}{{ p._fritz_device._smarthome_devices[ain] }}
-
-
- -{% endif %} -{% endblock %} \ No newline at end of file diff --git a/avm/_pv_1_5_12/locale.yaml b/avm/_pv_1_6_8/locale.yaml similarity index 100% rename from avm/_pv_1_5_12/locale.yaml rename to avm/_pv_1_6_8/locale.yaml diff --git a/avm/_pv_1_6_5/plugin.yaml b/avm/_pv_1_6_8/plugin.yaml similarity index 96% rename from avm/_pv_1_6_5/plugin.yaml rename to avm/_pv_1_6_8/plugin.yaml index 334cf3d74..ed4d114e2 100755 --- a/avm/_pv_1_6_5/plugin.yaml +++ b/avm/_pv_1_6_8/plugin.yaml @@ -1,18 +1,18 @@ # Metadata for the Smart-Plugin plugin: # Global plugin attributes - type: interface # plugin type (gateway, interface, protocol, system, web) + type: interface description: de: 'Ansteuerung von AVM FRITZ!Boxen, WLAN-Repeatern, DECT Steckdosen, etc.' en: 'Get and send data from/to AVM devices such as the FRITZ!Box, Wifi Repeaters or DECT sockets.' maintainer: psilo909, sisamiwe - tester: Sandman60, cmalo + tester: bmx, onkelandy, aschwith, schuma # keywords: iot xyz state: 'qa-passed' documentation: http://smarthomeng.de/user/plugins/avm/user_doc.html support: https://knx-user-forum.de/forum/supportforen/smarthome-py/934835-avm-plugin - version: 1.6.6 # Plugin version + version: 1.6.8 # Plugin version sh_minversion: 1.6 # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) multi_instance: True # plugin supports multi instance @@ -82,19 +82,35 @@ parameters: type: bool default: False description: - de: '(optional) Aktiviert oder deaktiviert den Zugriff auf AVM Smarthome Geräte mit dem AHA HTTP Interface.' - en: '(optional) Activates or deactivates access to AVM smarthome devices via AHA HTTP interface' + de: '(optional) Aktiviert oder deaktiviert den Zugriff auf AVM HomeAutomation Geräte mit dem AHA HTTP Interface.' 
+ en: '(optional) Activates or deactivates access to AVM HomeAutomation devices via AHA HTTP interface' + log_entry_count: + type: int + default: 200 + description: + de: '(optional) Anzahl der Log-Messages, die verarbeitet/bereitgestellt werden. 0 = alle' + en: '(optional) Number of Log-Messages, which will be displayed. 0 = all' webif_pagelength: type: int - default: 100 + default: 0 valid_list: - -1 + - 0 - 25 - 50 - 100 description: - de: 'Anzahl an Items, die standardmäßig in einer Web Interface Tabelle pro Seite angezeigt werden' - en: 'Amount of items being listed in a web interface table per page by default' + de: 'Anzahl an Items, die standardmäßig in einer Web Interface Tabelle pro Seite angezeigt werden. + 0 = automatisch, -1 = alle' + en: 'Number of items being listed in a web interface table per page by default. + 0 = automatic, -1 = all' + description_long: + de: 'Anzahl an Items, die standardmäßig in einer Web Interface Tabelle pro Seite angezeigt werden.\n + Bei 0 wird die Tabelle automatisch an die Höhe des Browserfensters angepasst.\n + Bei -1 werden alle Tabelleneinträge auf einer Seite angezeigt.' + en: 'Amount of items being listed in a web interface table per page by default.\n + 0 adjusts the table height automatically based on the height of the browser windows.\n + -1 shows all table entries on one page.' 
item_attributes: # Definition of item attributes defined by this plugin @@ -224,7 +240,7 @@ item_attributes: - 'hue' # w/r Hue (Status und Setzen) num neu 1.6.4 - 'set_saturation' # w/o Saturation Setzen num - 'saturation' # w/r Saturation (Status und Setzen) num neu 1.6.4 - - 'set_colortemperature' # w/o Farbtemperatur Setzten num + - 'set_colortemperature' # w/o Farbtemperatur setzten num - 'colortemperature' # w/r Farbtemperatur (Status und Setzen) num neu 1.6.4 - 'switch_state' # r/w Schaltzustand Steckdose (Status und Setzen) bool @@ -637,7 +653,7 @@ item_structs: type: bool smarthome_temperature_sensor: - temperatur: + temperature: avm_data_type@instance: current_temperature type: num temperature_offset: diff --git a/avm/_pv_1_5_12/requirements.txt b/avm/_pv_1_6_8/requirements.txt similarity index 100% rename from avm/_pv_1_5_12/requirements.txt rename to avm/_pv_1_6_8/requirements.txt diff --git a/avm/_pv_1_5_12/sv_widgets/widget_avm.html b/avm/_pv_1_6_8/sv_widgets/widget_avm.html similarity index 100% rename from avm/_pv_1_5_12/sv_widgets/widget_avm.html rename to avm/_pv_1_6_8/sv_widgets/widget_avm.html diff --git a/avm/_pv_1_6_5/user_doc.rst b/avm/_pv_1_6_8/user_doc.rst similarity index 84% rename from avm/_pv_1_6_5/user_doc.rst rename to avm/_pv_1_6_8/user_doc.rst index ab064ddb6..afc0c80a1 100755 --- a/avm/_pv_1_6_5/user_doc.rst +++ b/avm/_pv_1_6_8/user_doc.rst @@ -4,6 +4,23 @@ AVM Changelog --------- + +1.6.8 +~~~~~ +- HKR: Allow set temperature to be set directly to value 126.5 (off=frost protection mode). Until now, a bug mapped value off (=126.5) to maximum heating (=127). 
+ +1.6.7 +~~~~~ +- Implement plugin configuration "avm_home_automation" to use AHA (AVM HomeAutomation) Interface (Default: False) +- correct typo "temperatur" to "temperature" in struct +- add method "get_device_log_from_lua_separated" to get log already as list of list +- limit Log entries shown on WebIF to recent 200 +- Update WebIF with possibility to adapt table size to screen +- Debugging for 'button' e.g. DECT440 +- Minor code correction / debugging +- Adapt user_doc.rst +- feature provided by plugin avm_smarthome are completely integrated. Therefore tat plugin is marked as deprecated. + 1.6.6 ~~~~~ @@ -102,6 +119,10 @@ Dafür stehen die folgenden Einstellungen zur Verfügung: - `ssl`: True or False => True will add "https", False "http" to the URLs in the plugin - `verify`: True or False => Turns certificate verification on or off. Typically False - `call_monitor`: True or False => Activates or deactivates the MonitoringService, which connects to the FritzDevice's call monitor +- 'call_monitor_incoming_filter': Filter only specific numbers to be watched by call monitor +- 'avm_home_automation': True or False => Activates or deactivates the AHA Interface to communicate with HomeAutomation Devices, +- 'log_entry_count': Number of Log-Messages, which will be displayed. +- 'webif_pagelength': Number of items being listed in a web interface table per page by default. - `instance`: Unique identifier for each FritzDevice / each instance of the plugin Alternativ kann das Plugin auch manuell konfiguriert werden. @@ -121,6 +142,7 @@ Alternativ kann das Plugin auch manuell konfiguriert werden. verify: False # verify ssl certificate call_monitor: 'True' call_monitor_incoming_filter: "... ## optional, don't set if you don't want to watch only one specific number with your call monitor" + avm_home_automation: 'True' instance: fritzbox_7490 fb2: @@ -134,6 +156,7 @@ Alternativ kann das Plugin auch manuell konfiguriert werden. 
ssl: True # use https or not verify: False # verify ssl certificate call_monitor: 'True' + avm_home_automation: 'False' instance: wlan_repeater_1750 .. note:: @@ -170,7 +193,7 @@ avm_mac ~~~~~~~ Definition der MAC Adresse für Items vom avm_data_type `network_device`. Nur für diese Items mandatory!' -ain +avm_ain ~~~ Definition der Aktor Identifikationsnummer (AIN)Items für smarthome Items. Nur für diese Items mandatory!' @@ -189,18 +212,18 @@ item_structs Zur Vereinfachung der Einrichtung von Items sind für folgende Item-structs vordefiniert: - ``info`` - General Information about Fritzbox -- ``monitor`` - Coll Monitor +- ``monitor`` - Call Monitor - ``tam`` - (für einen) Anrufbeantworter - ``deflection`` - (für eine) Rufumleitung - ``wan`` - WAN Items - ``wlan`` - Wireless Lan Items -- ``device`` - Item eines verbundenen Gerätes -- ``smarthome_general`` - Allgemeine Informationen eines DECT smarthome Devices -- ``smarthome_hkr`` - spezifische Informationen eines DECT Thermostat Devices -- ``smarthome_temperatur_sensor`` - spezifische Informationen eines DECT smarthome Devices mit Temperatursensor -- ``smarthome_alert`` - spezifische Informationen eines DECT smarthome Devices mit Alarmfunktion -- ``smarthome_switch`` - spezifische Informationen eines DECT smarthome Devices mit Schalter -- ``smarthome_powermeter`` - spezifische Informationen eines DECT smarthome Devices mit Strommessung +- ``device`` - Items eines verbundenen Gerätes +- ``smarthome_general`` - Allgemeine Informationen eines AVM HomeAutomation Devices +- ``smarthome_hkr`` - spezifische Informationen eines AVM HomeAutomation Thermostat Devices +- ``smarthome_temperatur_sensor`` - spezifische Informationen eines AVM HomeAutomation Devices mit Temperatursensor +- ``smarthome_alert`` - spezifische Informationen eines AVM HomeAutomation Devices mit Alarmfunktion +- ``smarthome_switch`` - spezifische Informationen eines AVM HomeAutomation Devices mit Schalter +- ``smarthome_powermeter`` - spezifische 
Informationen eines AVM HomeAutomation Devices mit Strommessung Item Beispiel mit Verwendung der structs ohne Instanz @@ -249,7 +272,7 @@ Item Beispiel mit Verwendung der structs ohne Instanz smarthome: hkr_og_bad: type: foo - ain: 'xxxxx xxxxxxx' + avm_ain: 'xxxxx xxxxxxx' struct: - avm.smarthome_general - avm.smarthome_hkr @@ -468,10 +491,54 @@ Aufruf des Webinterfaces Das Plugin kann aus dem Admin-IF aufgerufen werden. Dazu auf der Seite Plugins in der entsprechenden Zeile das Icon in der Spalte **Web Interface** anklicken. +Es werden nur die Tabs angezeigt, deren Funktionen im Plugin aktiviert sind bzw. die von Fritzdevice unterstützt werden. + Im WebIF stehen folgende Reiter zur Verfügung: - - AVM Items - Tabellarische Auflistung aller Items, die mit dem TR-064 Protokoll ausgelesen werden - - AVM Smarthome Items - Tabellarische Auflistung aller Items, die mit dem AHA Protokoll ausgelesen werden (Items der Smarthome Geräte) - - Plugin-API - Beschreibung der Plugin-API - - Log-Einträge - Listung der Logeinträge der Fritzbox - - Call Monitor Items - Tabellarische Auflistung des Anrufmonitors (nur wenn dieser konfiguriert ist) - - AVM Smarthome Devices - Auflistung der mit der Fritzbox verbundenen Geräte + +AVM Items +~~~~~~~~~ + +Tabellarische Auflistung aller Items, die mit dem TR-064 Protokoll ausgelesen werden + +.. image:: user_doc/assets/webif_tab1.jpg + :class: screenshot + +AVM Smarthome Items +~~~~~~~~~~~~~~~~~~~ +Tabellarische Auflistung aller Items, die mit dem AHA Protokoll ausgelesen werden (Items der AVM HomeAutomation Geräte) + +.. image:: user_doc/assets/webif_tab2.jpg + :class: screenshot + +AVM Smarthome Devices +~~~~~~~~~~~~~~~~~~~~~ + +Auflistung der mit der Fritzbox verbundenen AVM HomeAutomation Geräte + +.. image:: user_doc/assets/webif_tab3.jpg + :class: screenshot + +Call Monitor Items +~~~~~~~~~~~~~~~~~~ + +Tabellarische Auflistung des Anrufmonitors (nur wenn dieser konfiguriert ist) + +.. 
image:: user_doc/assets/webif_tab4.jpg + :class: screenshot + +Log-Einträge +~~~~~~~~~~~~ + +Listung der Logeinträge der Fritzbox + +.. image:: user_doc/assets/webif_tab5.jpg + :class: screenshot + +Plugin-API +~~~~~~~~~~ + +Beschreibung der Plugin-API + +.. image:: user_doc/assets/webif_tab6.jpg + :class: screenshot + diff --git a/avm/_pv_1_6_8/user_doc/assets/webif_tab1.jpg b/avm/_pv_1_6_8/user_doc/assets/webif_tab1.jpg new file mode 100755 index 000000000..0c4be880d Binary files /dev/null and b/avm/_pv_1_6_8/user_doc/assets/webif_tab1.jpg differ diff --git a/avm/_pv_1_6_8/user_doc/assets/webif_tab2.jpg b/avm/_pv_1_6_8/user_doc/assets/webif_tab2.jpg new file mode 100755 index 000000000..f7a31c49e Binary files /dev/null and b/avm/_pv_1_6_8/user_doc/assets/webif_tab2.jpg differ diff --git a/avm/_pv_1_6_8/user_doc/assets/webif_tab3.jpg b/avm/_pv_1_6_8/user_doc/assets/webif_tab3.jpg new file mode 100755 index 000000000..5cd0775dd Binary files /dev/null and b/avm/_pv_1_6_8/user_doc/assets/webif_tab3.jpg differ diff --git a/avm/_pv_1_6_8/user_doc/assets/webif_tab4.jpg b/avm/_pv_1_6_8/user_doc/assets/webif_tab4.jpg new file mode 100755 index 000000000..5a0489a96 Binary files /dev/null and b/avm/_pv_1_6_8/user_doc/assets/webif_tab4.jpg differ diff --git a/avm/_pv_1_6_8/user_doc/assets/webif_tab5.jpg b/avm/_pv_1_6_8/user_doc/assets/webif_tab5.jpg new file mode 100755 index 000000000..7368fb5d1 Binary files /dev/null and b/avm/_pv_1_6_8/user_doc/assets/webif_tab5.jpg differ diff --git a/avm/_pv_1_6_8/user_doc/assets/webif_tab6.jpg b/avm/_pv_1_6_8/user_doc/assets/webif_tab6.jpg new file mode 100755 index 000000000..520f04432 Binary files /dev/null and b/avm/_pv_1_6_8/user_doc/assets/webif_tab6.jpg differ diff --git a/avm/_pv_1_6_5/webif/__init__.py b/avm/_pv_1_6_8/webif/__init__.py similarity index 93% rename from avm/_pv_1_6_5/webif/__init__.py rename to avm/_pv_1_6_8/webif/__init__.py index a70d399d4..75b8b9da1 100755 --- a/avm/_pv_1_6_5/webif/__init__.py +++ 
b/avm/_pv_1_6_8/webif/__init__.py @@ -72,7 +72,13 @@ def index(self, reload=None, action=None): self.call_monitor_items.extend(self.plugin._monitoring_service.get_items_incoming()) self.call_monitor_items.extend(self.plugin._monitoring_service.get_items_outgoing()) + try: + pagelength = self.plugin.webif_pagelength + except Exception: + pagelength = 100 + tmpl = self.tplenv.get_template('index.html') + return tmpl.render(plugin_shortname=self.plugin.get_shortname(), plugin_version=self.plugin.get_version(), plugin_info=self.plugin.get_info(), @@ -83,7 +89,7 @@ def index(self, reload=None, action=None): smarthome_items=sorted(self.plugin.get_fritz_device().get_smarthome_items(), key=lambda k: str.lower(k['_path'])), smarthome_item_count=len(self.plugin.get_fritz_device().get_smarthome_items()), p=self.plugin, - webif_pagelength=self.plugin.webif_pagelength, + webif_pagelength=pagelength, ) @cherrypy.expose @@ -105,6 +111,8 @@ def get_data_html(self, dataSet=None): for item in self.call_monitor_items: data['call_monitor'][item.id()] = {} data['call_monitor'][item.id()]['value'] = item() + data['call_monitor'][item.id()]['last_update'] = item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') + data['call_monitor'][item.id()]['last_change'] = item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') if self.plugin.get_fritz_device().get_items(): data['avm_items'] = {} diff --git a/avm/_pv_1_5_12/webif/static/img/lamp_green.png b/avm/_pv_1_6_8/webif/static/img/lamp_green.png similarity index 100% rename from avm/_pv_1_5_12/webif/static/img/lamp_green.png rename to avm/_pv_1_6_8/webif/static/img/lamp_green.png diff --git a/avm/_pv_1_5_12/webif/static/img/lamp_red.png b/avm/_pv_1_6_8/webif/static/img/lamp_red.png similarity index 100% rename from avm/_pv_1_5_12/webif/static/img/lamp_red.png rename to avm/_pv_1_6_8/webif/static/img/lamp_red.png diff --git a/avm/_pv_1_5_12/webif/static/img/plugin_logo.png b/avm/_pv_1_6_8/webif/static/img/plugin_logo.png similarity index 100% 
rename from avm/_pv_1_5_12/webif/static/img/plugin_logo.png rename to avm/_pv_1_6_8/webif/static/img/plugin_logo.png diff --git a/avm/_pv_1_6_8/webif/templates/index.html b/avm/_pv_1_6_8/webif/templates/index.html new file mode 100755 index 000000000..4b909e49b --- /dev/null +++ b/avm/_pv_1_6_8/webif/templates/index.html @@ -0,0 +1,388 @@ +{% extends "base_plugin.html" %} +{% set logo_frame = false %} +{% set update_interval = 5000 %} + +{% block pluginstyles %} + +{% endblock pluginstyles %} + +{% block pluginscripts %} + + +{% endblock pluginscripts %} + + + +{% set tabcount = 6 %} + +{% set tab1title = _(""'AVM Items'" (" ~ avm_item_count ~ ") ") %} + +{% if p.aha_http_interface and smarthome_item_count > 0 %} + {% set tab2title = _(""'AVM Smarthome Items'" (" ~ smarthome_item_count ~ ") ") %} +{% else %} + {% set tab2title = "hidden" %} +{% endif %} + +{% if p.aha_http_interface %} + {% set tab3title = _(""'AVM Smarthome Devices'" (" ~ len(p._fritz_device._smarthome_devices) ~ ") ") %} +{% else %} + {% set tab3title = "hidden" %} +{% endif %} + +{% if p._call_monitor and call_monitor_item_count > 0 %} + {% set tab4title = _(""'Call Monitor Items'" (" ~ call_monitor_item_count ~ ") ") %} +{% else %} + {% set tab4title = "hidden" %} +{% endif %} + +{% set tab5title = _(""'Log-Einträge'"") %} + +{% set tab6title = _(""'Plugin-API'"") %} + + + +{% set language = p.get_sh().get_defaultlanguage() %} +{% if language not in ['en','de'] %} + {% set language = 'en' %} +{% endif %} + +{% block headtable %} + + + + + + + + + + + + + + + + + + + + + + +
+ {% if p.get_fritz_device().is_available() %} + {{ _('Gerät verfügbar') }} + {% else %} + {{ _('Gerät nicht verfügbar') }} + {% endif %} + {{ _('Verbunden') }} + + {% if p.get_fritz_device().is_available() %} + {{ _('Ja') }}{% if p._fritz_device.is_ssl() %}, SSL{% endif %} + {% else %} + {{ _('Nein') }} + {% endif %} + {{ _('Benutzer') }}{{ p.get_parameter_value_for_display('username') }}
+ {% if p._call_monitor %} + {% if p.get_monitoring_service()._listen_active %} + {{ _('Call Monitor verbunden') }} + {% else %} + {{ _('Call Monitor nicht verbunden') }} + {% endif %} + {% endif %} + {{ _('Call Monitor') }} + {% if p._call_monitor %}{{ _('Ja') }}{% if not p.get_monitoring_service()._listen_active %}, {{ _('nicht verbunden') }}{% endif %}{% else %}{{ _('Nein') }}{% endif %}{{ _('Passwort') }}{{ p.get_parameter_value_for_display('password') }}
{{ _('Host') }}{{ p._fritz_device.get_host() }}{{ _('Port') }}{{ p._fritz_device.get_port() }} {% if p._fritz_device.is_ssl() %}(HTTPS){% endif %}
+{% endblock %} + + + +{% block buttons %} + + +{% endblock buttons %} + +{% block bodytab1 %} +
+ + + + + + + + + + + + + {% for item in avm_items %} + {% set item_id = item.id() %} + {% if p.get_instance_name() %} + {% set instance_key = "avm_data_type@"+p.get_instance_name() %} + {% else %} + {% set instance_key = "avm_data_type" %} + {% endif %} + + + + + + + + + {% endfor %} + +
{{ _('Pfad') }}{{ _('Typ') }}{{ _('AVM Datentyp') }}{{ _('Wert') }}{{ _('Letztes Update') }}{{ _('Letzter Change') }}
{{ item_id }}{{ item.property.type }}{{ item.conf[instance_key] }}{{ item() }}{{ item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') }}{{ item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') }}
+
+{% endblock %} + +{% block bodytab2 %} +
+ + + + + + + + + + + + + {% for item in smarthome_items %} + {% set item_id = item.id() %} + {% if p.get_instance_name() %} + {% set instance_key = "avm_data_type@"+p.get_instance_name() %} + {% else %} + {% set instance_key = "avm_data_type" %} + {% endif %} + + + + + + + + + {% endfor %} + +
{{ _('Pfad') }}{{ _('Typ') }}{{ _('AVM Datentyp') }}{{ _('Wert') }}{{ _('Letztes Update') }}{{ _('Letzter Change') }}
{{ item_id }}{{ item.property.type }}{{ item.conf[instance_key] }}{{ item() }}{{ item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') }}{{ item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') }}
+
+{% endblock %} + +{% block bodytab3 %} +{% if p._fritz_device._smarthome_devices %} +
+ + + + + + + + + + {% for ain in p._fritz_device._smarthome_devices %} + + + + + + {% endfor %} + +
{{ 'No' }}{{ 'Device AIN' }}{{ 'Device Details (dict)' }}
{{ loop.index }}{{ ain }}{{ p._fritz_device._smarthome_devices[ain] }}
+
+{% endif %} +{% endblock %} + +{% block bodytab4 %} +
+ + + + + + + + + + + + + {% if p._call_monitor %} + {% for item in call_monitor_items %} + {% set item_id = item.id() %} + {% if p.get_instance_name() %} + {% set instance_key = "avm_data_type@"+p.get_instance_name() %} + {% else %} + {% set instance_key = "avm_data_type" %} + {% endif %} + + + + + + + + + {% endfor %} + {% endif %} + +
{{ _('Pfad') }}{{ _('Typ') }}{{ _('AVM Datentyp') }}{{ _('Wert') }}{{ _('Letztes Update') }}{{ _('Letzter Change') }}
{{ item_id }}{{ item.property.type }}{{ item.conf[instance_key] }}{{ item() }}{{ item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') }}{{ item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') }}
+
+{% endblock %} + +{% block bodytab5 %} +
+ + + + + + + + + + + {% set logentries = p.get_device_log_from_lua_separated() %} + {% if logentries %} + {% for logentry in logentries%} + + + + + + + {% endfor %} + {% endif %} + +
{{ 'Datum/Uhrzeit' }}{{ 'Meldung' }}{{ 'Typ' }}{{ 'Kategorie' }}
{{ logentry[0] }}{{ logentry[1] }} + + {{ logentry[2] }} + + {{ _('cat_'+logentry[3]|string) }}
+
+{% endblock %} + +{% block bodytab6 %} +
+ {% for function, dict in p.metadata.plugin_functions.items() %} +
+
+ {{ dict['type'] }} {{ function }}({% if dict['parameters'] is not none %}{% for name, paramdict in dict['parameters'].items() %}{% if loop.index > 1 %}, {% endif %}{{ name }}: {{ paramdict['type'] }}{% endfor %}{% endif %}) +
+
+ {{ dict['description'][language] }}
+ {% if dict['parameters'] is not none %} +
+
+ {{ _('Parameter') }}: +
+
+
    + {% for name, paramdict in dict['parameters'].items() %} +
  • + {{ name }}: {{ paramdict['type'] }}
    + {{ paramdict['description'][language] }} +
  • + {% endfor %} +
+
+
+ {% endif %} +
+
+ {% endfor %} +
+{% endblock %} \ No newline at end of file diff --git a/avm/item_attributes.py b/avm/item_attributes.py new file mode 100644 index 000000000..db2cdb477 --- /dev/null +++ b/avm/item_attributes.py @@ -0,0 +1,52 @@ +# !/usr/bin/env python +# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +# Copyright 2023 Michael Wenzel +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +# AVM for SmartHomeNG. https://github.com/smarthomeNG// +# +# This plugin is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This plugin is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this plugin. If not, see . 
+# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # + +ALL_ATTRIBUTES_SUPPORTED_BY_REPEATER = ['uptime', 'software_version', 'hardware_version', 'serial_number', 'manufacturer', 'product_class', 'manufacturer_oui', 'model_name', 'description', 'device_log', 'security_port', 'reboot', 'wlanconfig', 'wlanconfig_ssid', 'wlan_guest_time_remaining', 'wlan_associates', 'wps_active', 'wps_status', 'wps_mode', 'wlan_total_associates', 'hosts_count', 'hosts_info', 'mesh_topology', 'number_of_hosts', 'hosts_url', 'mesh_url', 'network_device', 'device_ip', 'device_connection_type', 'device_hostname', 'connection_status', 'is_host_active', 'host_info'] +ALL_ATTRIBUTES_WRITEABLE = ['reboot', 'set_target_temperature', 'set_window_open', 'set_hkr_boost', 'set_simpleonoff', 'set_level', 'set_levelpercentage', 'set_hue', 'set_saturation', 'set_colortemperature', 'switch_toggle', 'tam', 'wlanconfig', 'wps_active', 'deflection_enable', 'aha_device', 'target_temperature', 'window_open', 'hkr_boost', 'switch_state'] +ALL_ATTRIBUTES_WRITEONLY = ['set_target_temperature', 'set_window_open', 'set_hkr_boost', 'set_simpleonoff', 'set_level', 'set_levelpercentage', 'set_hue', 'set_saturation', 'set_colortemperature', 'switch_toggle'] +DEPRECATED_ATTRIBUTES = ['aha_device', 'hkr_device', 'set_temperature', 'temperature', 'set_temperature_reduced', 'set_temperature_comfort', 'firmware_version', 'boost_active'] +AHA_ATTRIBUTES = ['device_id', 'manufacturer', 'product_name', 'fw_version', 'connected', 'device_name', 'tx_busy', 'device_functions', 'set_target_temperature', 'target_temperature', 'current_temperature', 'temperature_reduced', 'temperature_comfort', 'temperature_offset', 'set_window_open', 'window_open', 'windowopenactiveendtime', 'set_hkr_boost', 'hkr_boost', 'boost_active', 'boostactiveendtime', 'summer_active', 'holiday_active', 'battery_low', 'battery_level', 'lock', 'device_lock', 
'errorcode', 'set_simpleonoff', 'simpleonoff', 'set_level', 'level', 'set_levelpercentage', 'levelpercentage', 'set_hue', 'hue', 'set_saturation', 'saturation', 'set_colortemperature', 'colortemperature', 'unmapped_hue', 'unmapped_saturation', 'color_mode', 'supported_color_mode', 'fullcolorsupport', 'mapped', 'switch_state', 'switch_mode', 'switch_toggle', 'power', 'energy', 'voltage', 'humidity', 'alert_state', 'blind_mode', 'endpositionsset'] +AHA_RO_ATTRIBUTES = ['device_id', 'manufacturer', 'product_name', 'fw_version', 'connected', 'device_name', 'tx_busy', 'device_functions', 'current_temperature', 'temperature_reduced', 'temperature_comfort', 'temperature_offset', 'windowopenactiveendtime', 'boost_active', 'boostactiveendtime', 'summer_active', 'holiday_active', 'battery_low', 'battery_level', 'lock', 'device_lock', 'errorcode', 'color_mode', 'supported_color_mode', 'fullcolorsupport', 'mapped', 'switch_mode', 'power', 'energy', 'voltage', 'humidity', 'alert_state', 'blind_mode', 'endpositionsset'] +AHA_WO_ATTRIBUTES = ['set_target_temperature', 'set_window_open', 'set_hkr_boost', 'set_simpleonoff', 'set_level', 'set_levelpercentage', 'set_hue', 'set_saturation', 'set_colortemperature', 'switch_toggle'] +AHA_RW_ATTRIBUTES = ['target_temperature', 'window_open', 'hkr_boost', 'switch_state'] +TR064_ATTRIBUTES = ['uptime', 'software_version', 'hardware_version', 'serial_number', 'manufacturer', 'product_class', 'manufacturer_oui', 'model_name', 'description', 'device_log', 'security_port', 'reboot', 'myfritz_status', 'call_direction', 'call_event', 'monitor_trigger', 'is_call_incoming', 'last_caller_incoming', 'last_call_date_incoming', 'call_event_incoming', 'last_number_incoming', 'last_called_number_incoming', 'is_call_outgoing', 'last_caller_outgoing', 'last_call_date_outgoing', 'call_event_outgoing', 'last_number_outgoing', 'last_called_number_outgoing', 'call_duration_incoming', 'call_duration_outgoing', 'tam', 'tam_name', 'tam_new_message_number', 
'tam_old_message_number', 'tam_total_message_number', 'wan_connection_status', 'wan_connection_error', 'wan_is_connected', 'wan_uptime', 'wan_ip', 'wan_upstream', 'wan_downstream', 'wan_total_packets_sent', 'wan_total_packets_received', 'wan_current_packets_sent', 'wan_current_packets_received', 'wan_total_bytes_sent', 'wan_total_bytes_received', 'wan_current_bytes_sent', 'wan_current_bytes_received', 'wan_link', 'wlanconfig', 'wlanconfig_ssid', 'wlan_guest_time_remaining', 'wlan_associates', 'wps_active', 'wps_status', 'wps_mode', 'wlan_total_associates', 'hosts_count', 'hosts_info', 'mesh_topology', 'number_of_hosts', 'hosts_url', 'mesh_url', 'network_device', 'device_ip', 'device_connection_type', 'device_hostname', 'connection_status', 'is_host_active', 'host_info', 'number_of_deflections', 'deflections_details', 'deflection_details', 'deflection_enable', 'deflection_type', 'deflection_number', 'deflection_to_number', 'deflection_mode', 'deflection_outgoing', 'deflection_phonebook_id', 'aha_device', 'hkr_device', 'set_temperature', 'temperature', 'set_temperature_reduced', 'set_temperature_comfort', 'firmware_version'] +AVM_RW_ATTRIBUTES = ['tam', 'wlanconfig', 'wps_active', 'deflection_enable', 'aha_device'] +CALL_MONITOR_ATTRIBUTES = ['call_direction', 'call_event', 'monitor_trigger', 'is_call_incoming', 'last_caller_incoming', 'last_call_date_incoming', 'call_event_incoming', 'last_number_incoming', 'last_called_number_incoming', 'is_call_outgoing', 'last_caller_outgoing', 'last_call_date_outgoing', 'call_event_outgoing', 'last_number_outgoing', 'last_called_number_outgoing', 'call_duration_incoming', 'call_duration_outgoing'] +CALL_MONITOR_ATTRIBUTES_TRIGGER = ['monitor_trigger'] +CALL_MONITOR_ATTRIBUTES_GEN = ['call_direction', 'call_event'] +CALL_MONITOR_ATTRIBUTES_IN = ['is_call_incoming', 'last_caller_incoming', 'last_call_date_incoming', 'call_event_incoming', 'last_number_incoming', 'last_called_number_incoming'] +CALL_MONITOR_ATTRIBUTES_OUT = 
['is_call_outgoing', 'last_caller_outgoing', 'last_call_date_outgoing', 'call_event_outgoing', 'last_number_outgoing', 'last_called_number_outgoing'] +CALL_MONITOR_ATTRIBUTES_DURATION = ['call_duration_incoming', 'call_duration_outgoing'] +WAN_CONNECTION_ATTRIBUTES = ['wan_connection_status', 'wan_connection_error', 'wan_is_connected', 'wan_uptime', 'wan_ip'] +WAN_COMMON_INTERFACE_ATTRIBUTES = ['wan_total_packets_sent', 'wan_total_packets_received', 'wan_current_packets_sent', 'wan_current_packets_received', 'wan_total_bytes_sent', 'wan_total_bytes_received', 'wan_current_bytes_sent', 'wan_current_bytes_received', 'wan_link'] +WAN_DSL_INTERFACE_ATTRIBUTES = ['wan_upstream', 'wan_downstream'] +TAM_ATTRIBUTES = ['tam', 'tam_name', 'tam_new_message_number', 'tam_old_message_number', 'tam_total_message_number'] +WLAN_CONFIG_ATTRIBUTES = ['wlanconfig', 'wlanconfig_ssid', 'wlan_guest_time_remaining', 'wlan_associates', 'wps_active', 'wps_status', 'wps_mode'] +WLAN_ATTRIBUTES = ['wlan_total_associates'] +FRITZ_DEVICE_ATTRIBUTES = ['uptime', 'software_version', 'hardware_version', 'serial_number', 'manufacturer', 'product_class', 'manufacturer_oui', 'model_name', 'description', 'device_log', 'security_port', 'reboot'] +HOST_ATTRIBUTES = ['host_info'] # host index needed +HOST_ATTRIBUTES_CHILD = ['network_device', 'device_ip', 'device_connection_type', 'device_hostname', 'connection_status', 'is_host_active'] # avm_mac needed +HOSTS_ATTRIBUTES = ['hosts_count', 'hosts_info', 'mesh_topology', 'number_of_hosts', 'hosts_url', 'mesh_url'] # no index needed +DEFLECTION_ATTRIBUTES = ['number_of_deflections', 'deflections_details', 'deflection_details', 'deflection_enable', 'deflection_type', 'deflection_number', 'deflection_to_number', 'deflection_mode', 'deflection_outgoing', 'deflection_phonebook_id'] +HOMEAUTO_RO_ATTRIBUTES = ['hkr_device', 'set_temperature', 'temperature', 'set_temperature_reduced', 'set_temperature_comfort', 'firmware_version'] +HOMEAUTO_RW_ATTRIBUTES = 
['aha_device'] +HOMEAUTO_ATTRIBUTES = ['aha_device', 'hkr_device', 'set_temperature', 'temperature', 'set_temperature_reduced', 'set_temperature_comfort', 'firmware_version'] +MYFRITZ_ATTRIBUTES = ['myfritz_status'] \ No newline at end of file diff --git a/avm/item_attributes_master.py b/avm/item_attributes_master.py new file mode 100644 index 000000000..092bcf7a4 --- /dev/null +++ b/avm/item_attributes_master.py @@ -0,0 +1,223 @@ +# !/usr/bin/env python +# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +# Copyright 2023 Michael Wenzel +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +# AVM for SmartHomeNG. https://github.com/smarthomeNG// +# +# This plugin is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This plugin is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this plugin. If not, see . 
+# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # + +# 'avm_data_type': {'interface': 'tr064', 'group': '', 'sub_group': None, 'access': '', 'type': '', 'deprecated': False, 'supported_by_repeater': False, 'description': ''}, + +AVM_DATA_TYPES = { + 'tr064': { + 'uptime': {'interface': 'tr064', 'group': 'fritz_device', 'sub_group': None, 'access': 'ro', 'type': 'num', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Laufzeit des Fritzdevice in Sekunden'}, + 'software_version': {'interface': 'tr064', 'group': 'fritz_device', 'sub_group': None, 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Serialnummer des Fritzdevice'}, + 'hardware_version': {'interface': 'tr064', 'group': 'fritz_device', 'sub_group': None, 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Software Version'}, + 'serial_number': {'interface': 'tr064', 'group': 'fritz_device', 'sub_group': None, 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Hardware Version'}, + 'manufacturer': {'interface': 'tr064', 'group': 'fritz_device', 'sub_group': None, 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Hersteller'}, + 'product_class': {'interface': 'tr064', 'group': 'fritz_device', 'sub_group': None, 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Produktklasse'}, + 'manufacturer_oui': {'interface': 'tr064', 'group': 'fritz_device', 'sub_group': None, 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Hersteller OUI'}, + 'model_name': {'interface': 'tr064', 'group': 'fritz_device', 'sub_group': None, 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 
'Modelname'}, + 'description': {'interface': 'tr064', 'group': 'fritz_device', 'sub_group': None, 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Modelbeschreibung'}, + 'device_log': {'interface': 'tr064', 'group': 'fritz_device', 'sub_group': None, 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Geräte Log'}, + 'security_port': {'interface': 'tr064', 'group': 'fritz_device', 'sub_group': None, 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Security Port'}, + 'reboot': {'interface': 'tr064', 'group': 'fritz_device', 'sub_group': None, 'access': 'wo', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Startet das Gerät neu'}, + 'myfritz_status': {'interface': 'tr064', 'group': 'myfritz', 'sub_group': None, 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'MyFritz Status (an/aus)'}, + 'call_direction': {'interface': 'tr064', 'group': 'call_monitor', 'sub_group': 'generic', 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Richtung des letzten Anrufes'}, + 'call_event': {'interface': 'tr064', 'group': 'call_monitor', 'sub_group': 'generic', 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Status des letzten Anrufes'}, + 'monitor_trigger': {'interface': 'tr064', 'group': 'call_monitor', 'sub_group': 'trigger', 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Monitortrigger'}, + 'is_call_incoming': {'interface': 'tr064', 'group': 'call_monitor', 'sub_group': 'in', 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Eingehender Anruf erkannt'}, + 'last_caller_incoming': {'interface': 'tr064', 'group': 'call_monitor', 'sub_group': 'in', 
'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Letzter Anrufer'}, + 'last_call_date_incoming': {'interface': 'tr064', 'group': 'call_monitor', 'sub_group': 'in', 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Zeitpunkt des letzten eingehenden Anrufs'}, + 'call_event_incoming': {'interface': 'tr064', 'group': 'call_monitor', 'sub_group': 'in', 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Status des letzten eingehenden Anrufs'}, + 'last_number_incoming': {'interface': 'tr064', 'group': 'call_monitor', 'sub_group': 'in', 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Nummer des letzten eingehenden Anrufes'}, + 'last_called_number_incoming': {'interface': 'tr064', 'group': 'call_monitor', 'sub_group': 'in', 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Angerufene Nummer des letzten eingehenden Anrufs'}, + 'is_call_outgoing': {'interface': 'tr064', 'group': 'call_monitor', 'sub_group': 'out', 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Ausgehender Anruf erkannt'}, + 'last_caller_outgoing': {'interface': 'tr064', 'group': 'call_monitor', 'sub_group': 'out', 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Letzter angerufener Kontakt'}, + 'last_call_date_outgoing': {'interface': 'tr064', 'group': 'call_monitor', 'sub_group': 'out', 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Zeitpunkt des letzten ausgehenden Anrufs'}, + 'call_event_outgoing': {'interface': 'tr064', 'group': 'call_monitor', 'sub_group': 'out', 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Status des letzten ausgehenden Anrufs'}, 
+ 'last_number_outgoing': {'interface': 'tr064', 'group': 'call_monitor', 'sub_group': 'out', 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Nummer des letzten ausgehenden Anrufes'}, + 'last_called_number_outgoing': {'interface': 'tr064', 'group': 'call_monitor', 'sub_group': 'out', 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Letzte verwendete Telefonnummer für ausgehenden Anruf'}, + 'call_duration_incoming': {'interface': 'tr064', 'group': 'call_monitor', 'sub_group': 'duration', 'access': 'ro', 'type': 'num', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Dauer des eingehenden Anrufs'}, + 'call_duration_outgoing': {'interface': 'tr064', 'group': 'call_monitor', 'sub_group': 'duration', 'access': 'ro', 'type': 'num', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Dauer des ausgehenden Anrufs'}, + 'tam': {'interface': 'tr064', 'group': 'tam', 'sub_group': None, 'access': 'rw', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'TAM an/aus'}, + 'tam_name': {'interface': 'tr064', 'group': 'tam', 'sub_group': None, 'access': 'ro', 'type': 'str ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Name des TAM'}, + 'tam_new_message_number': {'interface': 'tr064', 'group': 'tam', 'sub_group': None, 'access': 'ro', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Anzahl der neuen Nachrichten'}, + 'tam_old_message_number': {'interface': 'tr064', 'group': 'tam', 'sub_group': None, 'access': 'ro', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Anzahl der alten Nachrichten'}, + 'tam_total_message_number': {'interface': 'tr064', 'group': 'tam', 'sub_group': None, 'access': 'ro', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Gesamtanzahl der Nachrichten'}, +
'wan_connection_status': {'interface': 'tr064', 'group': 'wan', 'sub_group': 'connection', 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': False, 'description': 'WAN Verbindungsstatus'}, + 'wan_connection_error': {'interface': 'tr064', 'group': 'wan', 'sub_group': 'connection', 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': False, 'description': 'WAN Verbindungsfehler'}, + 'wan_is_connected': {'interface': 'tr064', 'group': 'wan', 'sub_group': 'connection', 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'WAN Verbindung aktiv'}, + 'wan_uptime': {'interface': 'tr064', 'group': 'wan', 'sub_group': 'connection', 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': False, 'description': 'WAN Verbindungszeit'}, + 'wan_ip': {'interface': 'tr064', 'group': 'wan', 'sub_group': 'connection', 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': False, 'description': 'WAN IP Adresse'}, + 'wan_upstream': {'interface': 'tr064', 'group': 'wan', 'sub_group': 'dsl_interface', 'access': 'ro', 'type': 'num', 'deprecated': False, 'supported_by_repeater': False, 'description': 'WAN Upstream Datenmenge'}, + 'wan_downstream': {'interface': 'tr064', 'group': 'wan', 'sub_group': 'dsl_interface', 'access': 'ro', 'type': 'num', 'deprecated': False, 'supported_by_repeater': False, 'description': 'WAN Downstream Datenmenge'}, + 'wan_total_packets_sent': {'interface': 'tr064', 'group': 'wan', 'sub_group': 'common_interface', 'access': 'ro', 'type': 'num', 'deprecated': False, 'supported_by_repeater': False, 'description': 'WAN Verbindung-Anzahl insgesamt versendeter Pakete'}, + 'wan_total_packets_received': {'interface': 'tr064', 'group': 'wan', 'sub_group': 'common_interface', 'access': 'ro', 'type': 'num', 'deprecated': False, 'supported_by_repeater': False, 'description': 'WAN Verbindung-Anzahl insgesamt empfangener Pakete'}, + 
'wan_current_packets_sent': {'interface': 'tr064', 'group': 'wan', 'sub_group': 'common_interface', 'access': 'ro', 'type': 'num', 'deprecated': False, 'supported_by_repeater': False, 'description': 'WAN Verbindung-Anzahl aktuell versendeter Pakete'}, + 'wan_current_packets_received': {'interface': 'tr064', 'group': 'wan', 'sub_group': 'common_interface', 'access': 'ro', 'type': 'num', 'deprecated': False, 'supported_by_repeater': False, 'description': 'WAN Verbindung-Anzahl aktuell empfangener Pakete'}, + 'wan_total_bytes_sent': {'interface': 'tr064', 'group': 'wan', 'sub_group': 'common_interface', 'access': 'ro', 'type': 'num', 'deprecated': False, 'supported_by_repeater': False, 'description': 'WAN Verbindung-Anzahl insgesamt versendeter Bytes'}, + 'wan_total_bytes_received': {'interface': 'tr064', 'group': 'wan', 'sub_group': 'common_interface', 'access': 'ro', 'type': 'num', 'deprecated': False, 'supported_by_repeater': False, 'description': 'WAN Verbindung-Anzahl insgesamt empfangener Bytes'}, + 'wan_current_bytes_sent': {'interface': 'tr064', 'group': 'wan', 'sub_group': 'common_interface', 'access': 'ro', 'type': 'num', 'deprecated': False, 'supported_by_repeater': False, 'description': 'WAN Verbindung-Anzahl aktuelle Bitrate Senden'}, + 'wan_current_bytes_received': {'interface': 'tr064', 'group': 'wan', 'sub_group': 'common_interface', 'access': 'ro', 'type': 'num', 'deprecated': False, 'supported_by_repeater': False, 'description': 'WAN Verbindung-Anzahl aktuelle Bitrate Empfangen'}, + 'wan_link': {'interface': 'tr064', 'group': 'wan', 'sub_group': 'common_interface', 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'WAN Link'}, + 'wlanconfig': {'interface': 'tr064', 'group': 'wlan_config', 'sub_group': None, 'access': 'rw', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': True, 'description': 'WLAN An/Aus'}, + 'wlanconfig_ssid': {'interface': 'tr064', 'group': 'wlan_config', 'sub_group': 
None, 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'WLAN SSID'}, + 'wlan_guest_time_remaining': {'interface': 'tr064', 'group': 'wlan_config', 'sub_group': None, 'access': 'ro', 'type': 'num', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Verbleibende Zeit, bis zum automatischen Abschalten des Gäste-WLAN'}, + 'wlan_associates': {'interface': 'tr064', 'group': 'wlan_config', 'sub_group': None, 'access': 'ro', 'type': 'num', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Anzahl der verbundenen Geräte im jeweiligen WLAN'}, + 'wps_active': {'interface': 'tr064', 'group': 'wlan_config', 'sub_group': None, 'access': 'rw', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Schaltet WPS für das entsprechende WlAN an / aus'}, + 'wps_status': {'interface': 'tr064', 'group': 'wlan_config', 'sub_group': None, 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'WPS Status des entsprechenden WlAN'}, + 'wps_mode': {'interface': 'tr064', 'group': 'wlan_config', 'sub_group': None, 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'WPS Modus des entsprechenden WlAN'}, + 'wlan_total_associates': {'interface': 'tr064', 'group': 'wlan', 'sub_group': None, 'access': 'ro', 'type': 'num', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Anzahl der verbundenen Geräte im WLAN'}, + 'hosts_count': {'interface': 'tr064', 'group': 'host', 'sub_group': 'gen', 'access': 'ro', 'type': 'num', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Anzahl der Hosts'}, + 'hosts_info': {'interface': 'tr064', 'group': 'host', 'sub_group': 'gen', 'access': 'ro', 'type': 'dict', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Informationen über die Hosts'}, + 'mesh_topology': {'interface': 'tr064', 'group': 'host', 'sub_group': 'gen', 
'access': 'ro', 'type': 'dict', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Topologie des Mesh'}, + 'number_of_hosts': {'interface': 'tr064', 'group': 'host', 'sub_group': 'gen', 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Verbindungsstatus (Muss Child von "network_device" sein'}, + 'hosts_url': {'interface': 'tr064', 'group': 'host', 'sub_group': 'gen', 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Verbindungsstatus (Muss Child von "network_device" sein'}, + 'mesh_url': {'interface': 'tr064', 'group': 'host', 'sub_group': 'gen', 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Verbindungsstatus (Muss Child von "network_device" sein'}, + 'network_device': {'interface': 'tr064', 'group': 'host', 'sub_group': 'child', 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Verbindungsstatus // Defines Network device via MAC-Adresse'}, + 'device_ip': {'interface': 'tr064', 'group': 'host', 'sub_group': 'child', 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Geräte-IP (Muss Child von "network_device" sein'}, + 'device_connection_type': {'interface': 'tr064', 'group': 'host', 'sub_group': 'child', 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Verbindungstyp (Muss Child von "network_device" sein'}, + 'device_hostname': {'interface': 'tr064', 'group': 'host', 'sub_group': 'child', 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Gerätename (Muss Child von "network_device" sein'}, + 'connection_status': {'interface': 'tr064', 'group': 'host', 'sub_group': 'child', 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Verbindungsstatus (Muss Child von 
"network_device" sein'}, + 'is_host_active': {'interface': 'tr064', 'group': 'host', 'sub_group': 'child', 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Verbindungsstatus (Muss Child von "network_device" sein'}, + 'host_info': {'interface': 'tr064', 'group': 'host', 'sub_group': 'host', 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': True, 'description': 'Verbindungsstatus (Muss Child von "network_device" sein'}, + 'number_of_deflections': {'interface': 'tr064', 'group': 'deflection', 'sub_group': 'gen', 'access': 'ro', 'type': 'num', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Anzahl der eingestellten Rufumleitungen'}, + 'deflections_details': {'interface': 'tr064', 'group': 'deflection', 'sub_group': 'gen', 'access': 'ro', 'type': 'dict', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Details zu allen Rufumleitung (als dict)'}, + 'deflection_details': {'interface': 'tr064', 'group': 'deflection', 'sub_group': 'single', 'access': 'ro', 'type': 'dict', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Details zur Rufumleitung (als dict); Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item'}, + 'deflection_enable': {'interface': 'tr064', 'group': 'deflection', 'sub_group': 'single', 'access': 'rw', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Rufumleitung Status an/aus; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item'}, + 'deflection_type': {'interface': 'tr064', 'group': 'deflection', 'sub_group': 'single', 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Type der Rufumleitung; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item'}, + 'deflection_number': {'interface': 'tr064', 'group': 'deflection', 'sub_group': 'single', 'access': 'ro', 
'type': 'str', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Telefonnummer, die umgeleitet wird; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item'}, + 'deflection_to_number': {'interface': 'tr064', 'group': 'deflection', 'sub_group': 'single', 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Zielrufnummer der Umleitung; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item'}, + 'deflection_mode': {'interface': 'tr064', 'group': 'deflection', 'sub_group': 'single', 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Modus der Rufumleitung; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item'}, + 'deflection_outgoing': {'interface': 'tr064', 'group': 'deflection', 'sub_group': 'single', 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Outgoing der Rufumleitung; Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item'}, + 'deflection_phonebook_id': {'interface': 'tr064', 'group': 'deflection', 'sub_group': 'single', 'access': 'ro', 'type': 'str', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Phonebook_ID der Zielrufnummer (Only valid if Type==fromPB); Angabe der Rufumleitung mit Parameter "avm_deflection_index" im Item bzw Parent-Item'}, + 'aha_device': {'interface': 'tr064', 'group': 'homeauto', 'sub_group': None, 'access': 'rw', 'type': 'bool', 'deprecated': True, 'supported_by_repeater': False, 'description': 'Steckdose schalten; siehe "switch_state"'}, + 'hkr_device': {'interface': 'tr064', 'group': 'homeauto', 'sub_group': None, 'access': 'ro', 'type': 'str', 'deprecated': True, 'supported_by_repeater': False, 'description': 'Status des HKR (OPEN; CLOSED; TEMP)'}, + 'set_temperature': {'interface': 'tr064', 'group': 'homeauto', 'sub_group': 
None, 'access': 'ro', 'type': 'num', 'deprecated': True, 'supported_by_repeater': False, 'description': 'siehe "target_temperature"'}, + 'temperature': {'interface': 'tr064', 'group': 'homeauto', 'sub_group': None, 'access': 'ro', 'type': 'num', 'deprecated': True, 'supported_by_repeater': False, 'description': 'siehe "current_temperature"'}, + 'set_temperature_reduced': {'interface': 'tr064', 'group': 'homeauto', 'sub_group': None, 'access': 'ro', 'type': 'num', 'deprecated': True, 'supported_by_repeater': False, 'description': 'siehe "temperature_reduced"'}, + 'set_temperature_comfort': {'interface': 'tr064', 'group': 'homeauto', 'sub_group': None, 'access': 'ro', 'type': 'num', 'deprecated': True, 'supported_by_repeater': False, 'description': 'siehe "temperature_comfort"'}, + 'firmware_version': {'interface': 'tr064', 'group': 'homeauto', 'sub_group': None, 'access': 'ro', 'type': 'str', 'deprecated': True, 'supported_by_repeater': False, 'description': 'siehe "fw_version"'}, + }, + 'aha': { + 'device_id': {'interface': 'aha', 'group': 'device', 'sub_group': None, 'access': 'ro', 'type': 'str ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Geräte -ID'}, + 'manufacturer': {'interface': 'aha', 'group': 'device', 'sub_group': None, 'access': 'ro', 'type': 'str ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Hersteller'}, + 'product_name': {'interface': 'aha', 'group': 'device', 'sub_group': None, 'access': 'ro', 'type': 'str ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Produktname'}, + 'fw_version': {'interface': 'aha', 'group': 'device', 'sub_group': None, 'access': 'ro', 'type': 'str ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Firmware Version'}, + 'connected': {'interface': 'aha', 'group': 'device', 'sub_group': None, 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Verbindungsstatus'}, + 'device_name': 
{'interface': 'aha', 'group': 'device', 'sub_group': None, 'access': 'ro', 'type': 'str ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Gerätename'}, + 'tx_busy': {'interface': 'aha', 'group': 'device', 'sub_group': None, 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Verbindung aktiv'}, + 'device_functions': {'interface': 'aha', 'group': 'device', 'sub_group': None, 'access': 'ro', 'type': 'list', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Im Gerät vorhandene Funktionen'}, + 'set_target_temperature': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'wo', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Soll-Temperatur Setzen'}, + 'target_temperature': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'rw', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Soll-Temperatur (Status und Setzen)'}, + 'current_temperature': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Ist-Temperatur'}, + 'temperature_reduced': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Eingestellte reduzierte Temperatur'}, + 'temperature_comfort': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Eingestellte Komfort-Temperatur'}, + 'temperature_offset': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Eingestellter Temperatur-Offset'}, + 'set_window_open': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'wo', 'type': 'bool', 'deprecated': False, 
'supported_by_repeater': False, 'description': '"Window Open" Funktionen Setzen'}, + 'window_open': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'rw', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': '"Window Open" Funktion (Status und Setzen)'}, + 'windowopenactiveendtime': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Zeitliches Ende der "Window Open" Funktion'}, + 'set_hkr_boost': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'wo', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': '"Boost" Funktion Setzen'}, + 'hkr_boost': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'rw', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': '"Boost" Funktion (Status aund Setzen)'}, + 'boost_active': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'bool', 'deprecated': True, 'supported_by_repeater': False, 'description': 'Status der "Boost" Funktion'}, + 'boostactiveendtime': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Zeitliches Ende der "Boost" Funktion'}, + 'summer_active': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Status der "Sommer" Funktion'}, + 'holiday_active': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Status der "Holiday" Funktion'}, + 'battery_low': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': '"Battery low" Status'}, + 
'battery_level': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Batterie-Status in %'}, + 'lock': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Tastensperre über UI/API aktiv'}, + 'device_lock': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Tastensperre direkt am Gerät ein'}, + 'errorcode': {'interface': 'aha', 'group': 'hkr', 'sub_group': None, 'access': 'ro', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Fehlercodes die der HKR liefert'}, + 'set_simpleonoff': {'interface': 'aha', 'group': 'simpleonoff', 'sub_group': None, 'access': 'wo', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Gerät/Aktor/Lampe an-/ausschalten'}, + 'simpleonoff': {'interface': 'aha', 'group': 'simpleonoff', 'sub_group': None, 'access': 'wr', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Gerät/Aktor/Lampe (Status und Setzen)'}, + 'set_level': {'interface': 'aha', 'group': 'level', 'sub_group': None, 'access': 'wo', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Level/Niveau von 0 bis 255 Setzen'}, + 'level': {'interface': 'aha', 'group': 'level', 'sub_group': None, 'access': 'wr', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Level/Niveau von 0 bis 255 (Setzen & Status)'}, + 'set_levelpercentage': {'interface': 'aha', 'group': 'level', 'sub_group': None, 'access': 'wo', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Level/Niveau von 0% bis 100% Setzen'}, + 'levelpercentage': {'interface': 'aha', 'group': 'level', 'sub_group': None, 
'access': 'wr', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Level/Niveau von 0% bis 100% (Setzen & Status)'}, + 'set_hue': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'wo', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Hue Setzen'}, + 'hue': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'wr', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Hue (Status und Setzen)'}, + 'set_saturation': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'wo', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Saturation Setzen'}, + 'saturation': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'wr', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Saturation (Status und Setzen)'}, + 'set_colortemperature': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'wo', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Farbtemperatur Setzen'}, + 'colortemperature': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'wr', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Farbtemperatur (Status und Setzen)'}, + 'unmapped_hue': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'wr', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Hue (Status und Setzen)'}, + 'unmapped_saturation': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'wr', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Saturation (Status und Setzen)'}, + 'color_mode': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'ro', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Aktueller Farbmodus 
(1-HueSaturation-Mode; 4-Farbtemperatur-Mode)'}, + 'supported_color_mode': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'ro', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Unterstützer Farbmodus (1-HueSaturation-Mode; 4-Farbtemperatur-Mode)'}, + 'fullcolorsupport': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Lampe unterstützt setunmappedcolor'}, + 'mapped': {'interface': 'aha', 'group': 'color', 'sub_group': None, 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'von den Colordefaults abweichend zugeordneter HueSaturation-Wert gesetzt'}, + 'switch_state': {'interface': 'aha', 'group': 'switch', 'sub_group': None, 'access': 'rw', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Schaltzustand Steckdose (Status und Setzen)'}, + 'switch_mode': {'interface': 'aha', 'group': 'switch', 'sub_group': None, 'access': 'ro', 'type': 'str ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Zeitschaltung oder manuell schalten'}, + 'switch_toggle': {'interface': 'aha', 'group': 'switch', 'sub_group': None, 'access': 'wo', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Schaltzustand umschalten (toggle)'}, + 'power': {'interface': 'aha', 'group': 'powermeter', 'sub_group': None, 'access': 'ro', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Leistung in W (Aktualisierung alle 2 min)'}, + 'energy': {'interface': 'aha', 'group': 'powermeter', 'sub_group': None, 'access': 'ro', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'absoluter Verbrauch seit Inbetriebnahme in Wh'}, + 'voltage': {'interface': 'aha', 'group': 'powermeter', 'sub_group': None, 'access': 'ro', 'type': 'num ', 'deprecated': 
False, 'supported_by_repeater': False, 'description': 'Spannung in V (Aktualisierung alle 2 min)'}, + 'humidity': {'interface': 'aha', 'group': 'humidity', 'sub_group': None, 'access': 'ro', 'type': 'num ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'Relative Luftfeuchtigkeit in % (FD440)'}, + 'alert_state': {'interface': 'aha', 'group': 'alarm', 'sub_group': None, 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'letzter übermittelter Alarmzustand'}, + 'blind_mode': {'interface': 'aha', 'group': 'blind', 'sub_group': None, 'access': 'ro', 'type': 'str ', 'deprecated': False, 'supported_by_repeater': False, 'description': 'automatische Zeitschaltung oder manuell fahren'}, + 'endpositionsset': {'interface': 'aha', 'group': 'blind', 'sub_group': None, 'access': 'ro', 'type': 'bool', 'deprecated': False, 'supported_by_repeater': False, 'description': 'ist die Endlage für das Rollo konfiguriert'}, + } +} + + +def get_attrs(ifaces: list = ['tr064', 'aha'], sub_dict: dict = {}) -> list: + attributes = [] + for iface in ifaces: + for avm_data_type in AVM_DATA_TYPES[iface]: + if sub_dict.items() <= AVM_DATA_TYPES[iface][avm_data_type].items(): + attributes.append(avm_data_type) + return attributes + +def export_avm_data(): + ATTRS = {} + ATTRS['ALL_ATTRIBUTES_SUPPORTED_BY_REPEATER'] = get_attrs(sub_dict={'supported_by_repeater': True}) + ATTRS['ALL_ATTRIBUTES_WRITEABLE'] = get_attrs(sub_dict={'access': 'wo'}) + get_attrs(sub_dict={'access': 'rw'}) + ATTRS['ALL_ATTRIBUTES_WRITEONLY'] = get_attrs(sub_dict={'access': 'wo'}) + ATTRS['DEPRECATED_ATTRIBUTES'] = get_attrs(sub_dict={'deprecated': True}) + ATTRS['AHA_ATTRIBUTES'] = get_attrs(['aha']) + ATTRS['AHA_RO_ATTRIBUTES'] = get_attrs(['aha'], {'access': 'ro'}) + ATTRS['AHA_WO_ATTRIBUTES'] = get_attrs(['aha'], {'access': 'wo'}) + ATTRS['AHA_RW_ATTRIBUTES'] = get_attrs(['aha'], {'access': 'rw'}) + ATTRS['TR064_ATTRIBUTES'] = get_attrs(['tr064']) + 
ATTRS['AVM_RW_ATTRIBUTES'] = get_attrs(['tr064'], {'access': 'rw'}) + ATTRS['CALL_MONITOR_ATTRIBUTES'] = get_attrs(['tr064'], {'group': 'call_monitor'}) + ATTRS['CALL_MONITOR_ATTRIBUTES_TRIGGER'] = get_attrs(['tr064'], {'group': 'call_monitor', 'sub_group': 'trigger'}) + ATTRS['CALL_MONITOR_ATTRIBUTES_GEN'] = get_attrs(['tr064'], {'group': 'call_monitor', 'sub_group': 'generic'}) + ATTRS['CALL_MONITOR_ATTRIBUTES_IN'] = get_attrs(['tr064'], {'group': 'call_monitor', 'sub_group': 'in'}) + ATTRS['CALL_MONITOR_ATTRIBUTES_OUT'] = get_attrs(['tr064'], {'group': 'call_monitor', 'sub_group': 'out'}) + ATTRS['CALL_MONITOR_ATTRIBUTES_DURATION'] = get_attrs(['tr064'], {'group': 'call_monitor', 'sub_group': 'duration'}) + ATTRS['WAN_CONNECTION_ATTRIBUTES'] = get_attrs(['tr064'], {'group': 'wan', 'sub_group': 'connection'}) + ATTRS['WAN_COMMON_INTERFACE_ATTRIBUTES'] = get_attrs(['tr064'], {'group': 'wan', 'sub_group': 'common_interface'}) + ATTRS['WAN_DSL_INTERFACE_ATTRIBUTES'] = get_attrs(['tr064'], {'group': 'wan', 'sub_group': 'dsl_interface'}) + ATTRS['TAM_ATTRIBUTES'] = get_attrs(['tr064'], {'group': 'tam'}) + ATTRS['WLAN_CONFIG_ATTRIBUTES'] = get_attrs(['tr064'], {'group': 'wlan_config'}) + ATTRS['WLAN_ATTRIBUTES'] = get_attrs(['tr064'], {'group': 'wlan'}) + ATTRS['FRITZ_DEVICE_ATTRIBUTES'] = get_attrs(['tr064'], {'group': 'fritz_device'}) + ATTRS['HOST_ATTRIBUTES'] = get_attrs(['tr064'], {'group': 'host', 'sub_group': 'info'}) + ATTRS['HOSTS_ATTRIBUTES'] = get_attrs(['tr064'], {'group': 'host', 'sub_group': 'gen'}) + ATTRS['HOST_ATTRIBUTES_CHILD'] = get_attrs(['tr064'], {'group': 'host', 'sub_group': 'child'}) + ATTRS['DEFLECTION_ATTRIBUTES'] = get_attrs(['tr064'], {'group': 'deflection'}) + ATTRS['HOMEAUTO_RO_ATTRIBUTES'] = get_attrs(['tr064'], {'group': 'homeauto', 'access': 'ro'}) + ATTRS['HOMEAUTO_RW_ATTRIBUTES'] = get_attrs(['tr064'], {'group': 'homeauto', 'access': 'rw'}) + ATTRS['HOMEAUTO_ATTRIBUTES'] = get_attrs(['tr064'], {'group': 'homeauto'}) + 
ATTRS['MYFRITZ_ATTRIBUTES'] = get_attrs(['tr064'], {'group': 'myfritz'}) + + for attr, alist in ATTRS.items(): + print(f'{attr} = {alist!r}') + +if __name__ == '__main__': + export_avm_data() diff --git a/avm/locale.yaml b/avm/locale.yaml old mode 100755 new mode 100644 diff --git a/avm/plugin.yaml b/avm/plugin.yaml old mode 100755 new mode 100644 index 22e463c4a..00737b2e9 --- a/avm/plugin.yaml +++ b/avm/plugin.yaml @@ -12,7 +12,7 @@ plugin: documentation: http://smarthomeng.de/user/plugins/avm/user_doc.html support: https://knx-user-forum.de/forum/supportforen/smarthome-py/934835-avm-plugin - version: 2.0.0 # Plugin version (must match the version specified in __init__.py) + version: 2.0.2 # Plugin version (must match the version specified in __init__.py) sh_minversion: 1.8 # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) # py_minversion: 3.6 # minimum Python version to use for this plugin @@ -76,8 +76,8 @@ parameters: type: str default: '' description: - de: '(optional) Spezielle Rufnummern ausfiltern, die vom Callmonitor ignoriert werden sollen.' - en: '(optional) Filter only specific numbers to be watched by call monitor' + de: '(optional) Filter, auf welche eigenen Rufnummern (oder Teile davon) der Callmonitor reagieren soll. Ist der Filter leer, werden alle eigenen Rufnummern überwacht. Wird ein Filterstring bspw. "12234" angegeben, werden nur die eigenen Anschlussnummern, die "12234" enthalten, vom CallMonitor verarbeitet.' + en: '(optional) Filter, for which numbers (or part of the number) of own telephone connection the Callmonitor should react.' avm_home_automation: type: bool default: false @@ -90,6 +90,12 @@ parameters: description: de: '(optional) Anzahl der Log-Messages, die verarbeitet/bereitgestellt werden. 0 = alle' en: '(optional) Amount of Log-Messages, witch will be displayed. 
0 = all' + tr064_item_blacklist: + type: bool + default: False + description: + de: '(optional) Wenn aktiv, werden TR064 Items, deren Abfrageergebnis 2x zu einen Fehler geführt hat, blacklisted und anschließend nicht mehr abgefragt.' + en: '(optional) If active, TR064 Items for which data polling resulted in errors, will be blacklisted and excluded from update cycle' item_attributes: # Definition of item attributes defined by this plugin @@ -145,8 +151,8 @@ item_attributes: - 'wan_current_packets_received' # r/o num WAN Verbindung-Anzahl aktuell empfangener Pakete - 'wan_total_bytes_sent' # r/o num WAN Verbindung-Anzahl insgesamt versendeter Bytes - 'wan_total_bytes_received' # r/o num WAN Verbindung-Anzahl insgesamt empfangener Bytes - - 'wan_current_bytes_sent' # r/o num WAN Verbindung-Anzahl insgesamt versendeter Bytes - - 'wan_current_bytes_received' # r/o num WAN Verbindung-Anzahl insgesamt empfangener Bytes + - 'wan_current_bytes_sent' # r/o num WAN Verbindung-Anzahl aktuelle Bitrate Senden + - 'wan_current_bytes_received' # r/o num WAN Verbindung-Anzahl aktuelle Bitrate Empfangen - 'wan_link' # r/o bool WAN Link # WLAN Config Attribute Hinweis: alle Attribute benötigen zusätzlich das Attribut 'avm_wlan_index' - 'wlanconfig' # r/w bool WLAN An/Aus @@ -225,23 +231,34 @@ item_attributes: - 'level' # w/r num Level/Niveau von 0 bis 255 (Setzen & Status) - 'set_levelpercentage' # w/o num Level/Niveau von 0% bis 100% Setzen - 'levelpercentage' # w/r num Level/Niveau von 0% bis 100% (Setzen & Status) + - - 'set_hue' # w/o num Hue Setzen - 'hue' # w/r num Hue (Status und Setzen) - 'set_saturation' # w/o num Saturation Setzen - 'saturation' # w/r num Saturation (Status und Setzen) - 'set_colortemperature' # w/o num Farbtemperatur Setzen - 'colortemperature' # w/r num Farbtemperatur (Status und Setzen) - + - 'unmapped_hue' # w/r num Hue (Status und Setzen) + - 'unmapped_saturation' # w/r num Saturation (Status und Setzen) + - 'color_mode' # r/o num Aktueller Farbmodus 
(1-HueSaturation-Mode; 4-Farbtemperatur-Mode) + - 'supported_color_mode' # r/o num Unterstützer Farbmodus (1-HueSaturation-Mode; 4-Farbtemperatur-Mode) + - 'fullcolorsupport' # r/o bool Lampe unterstützt setunmappedcolor + - 'mapped' # r/o bool von den Colordefaults abweichend zugeordneter HueSaturation-Wert gesetzt + - 'switch_state' # r/w bool Schaltzustand Steckdose (Status und Setzen) - - 'switch_mode' # r/o bool Zeitschaltung oder manuell schalten + - 'switch_mode' # r/o str Zeitschaltung oder manuell schalten - 'switch_toggle' # w/o bool Schaltzustand umschalten (toggle) - 'power' # r/o num Leistung in W (Aktualisierung alle 2 min) - - 'energy' # r/o num absoluter Verbrauch seit Inbetriebnahme + - 'energy' # r/o num absoluter Verbrauch seit Inbetriebnahme in Wh - 'voltage' # r/o num Spannung in V (Aktualisierung alle 2 min) - - 'humidity' # r/o num Relative Luftfeuchtigkeit in % (FD440) - - 'alert_state' # r/o bool letzter übermittelter Alarmzustand + - 'humidity' # r/o num Relative Luftfeuchtigkeit in % (FD440) + + - 'alert_state' # r/o bool letzter übermittelter Alarmzustand + + - 'blind_mode' # r/o str automatische Zeitschaltung oder manuell fahren + - 'endpositionsset' # r/o bool ist die Endlage für das Rollo konfiguriert avm_incoming_allowed: type: str @@ -494,6 +511,14 @@ item_structs: type: num visu_acl: ro avm_data_type@instance: wan_total_bytes_received + current_bytes_sent: + type: num + visu_acl: ro + avm_data_type@instance: wan_current_bytes_sent + current_bytes_receive: + type: num + visu_acl: ro + avm_data_type@instance: wan_current_bytes_received link: type: bool visu_acl: ro @@ -658,6 +683,11 @@ item_structs: avm_data_type@instance: temperature_offset type: num + aha_humidity_sensor: + humidity: + avm_data_type@instance: humidity + type: num + aha_alert: state: avm_data_type@instance: alert @@ -667,6 +697,9 @@ item_structs: switch_state: avm_data_type@instance: switch_state type: bool + switch_mode: + avm_data_type@instance: switch_mode + 
type: str switch_toggle: avm_data_type@instance: switch_toggle type: bool @@ -683,6 +716,64 @@ item_structs: avm_data_type@instance: voltage type: num + aha_level: + level: + avm_data_type@instance: level + type: num + level_percentage: + avm_data_type@instance: levelpercentage + type: num + + aha_blind: + blind_mode: + avm_data_type@instance: blind_mode + type: str + endpositionsset: + avm_data_type@instance: endpositionsset + type: bool + + aha_on_off: + on_off: + avm_data_type@instance: simpleonoff + type: bool + + aha_button: + battery_low: + avm_data_type@instance: battery_low + type: bool + battery_level: + avm_data_type@instance: battery_level + type: num + + aha_color: + color_mode: + avm_data_type@instance: color_mode + type: num + supported_color_mode: + avm_data_type@instance: supported_color_mode + type: num + fullcolorsupport: + avm_data_type@instance: fullcolorsupport + type: bool + mapped: + avm_data_type@instance: mapped + type: bool + hue: + avm_data_type@instance: hue + type: num + saturation: + avm_data_type@instance: saturation + type: num + unmapped_hue: + avm_data_type@instance: unmapped_hue + type: bool + unmapped_saturation: + avm_data_type@instance: unmapped_saturation + type: bool + colortemperature: + avm_data_type@instance: colortemperature + type: num + #item_attribute_prefixes: # Definition of item attributes that only have a common prefix (enter 'item_attribute_prefixes: NONE' or ommit this section, if section should be empty) # NOTE: This section should only be used, if really necessary (e.g. 
for the stateengine plugin) diff --git a/avm/requirements.txt b/avm/requirements.txt old mode 100755 new mode 100644 index b4531941f..3437857be --- a/avm/requirements.txt +++ b/avm/requirements.txt @@ -1,2 +1,2 @@ -requests -lxml \ No newline at end of file +requests +lxml>=4.9.2 diff --git a/avm/sv_widgets/widget_avm.html b/avm/sv_widgets/widget_avm.html old mode 100755 new mode 100644 diff --git a/avm/tr064/action.py b/avm/tr064/action.py new file mode 100644 index 000000000..07cad0628 --- /dev/null +++ b/avm/tr064/action.py @@ -0,0 +1,112 @@ +"""TR-064 action.""" +from io import BytesIO +import lxml.etree as ET +import requests + +from .config import TR064_SERVICE_NAMESPACE +from .config import TR064_CONTROL_NAMESPACE +from .config import IGD_SERVICE_NAMESPACE +from .config import IGD_CONTROL_NAMESPACE +from .exceptions import TR064MissingArgumentException, TR064UnknownArgumentException +from .attribute_dict import AttributeDict + + +# pylint: disable=too-many-instance-attributes, too-few-public-methods +class Action: + """TR-064 action. + + :param lxml.etree.Element xml: XML action element + :param HTTPBasicAuthHandler auth: HTTPBasicAuthHandler object, e.g. HTTPDigestAuth + :param str base_url: URL to router. 
+ :param str name: Action name + :param str service_type: Service type + :param str service_id: Service ID + :param str control_url: Control URL + """ + + # pylint: disable=too-many-arguments + def __init__(self, xml, auth, base_url, name, service_type, service_id, control_url, verify: bool = False, description_file='tr64desc.xml'): + self.auth = auth + self.base_url = base_url + self.name = name + self.service_type = service_type + self.service_id = service_id + self.control_url = control_url + self.verify = verify + self.description_file = description_file + self.namespaces = IGD_SERVICE_NAMESPACE if 'igd' in description_file else TR064_SERVICE_NAMESPACE + self.control_namespace = IGD_CONTROL_NAMESPACE if 'igd' in description_file else TR064_CONTROL_NAMESPACE + + ET.register_namespace('s', 'http://schemas.xmlsoap.org/soap/envelope/') + ET.register_namespace('h', 'http://soap-authentication.org/digest/2001/10/') + + self.headers = {'content-type': 'text/xml; charset="utf-8"'} + self.envelope = ET.Element( + '{http://schemas.xmlsoap.org/soap/envelope/}Envelope', + attrib={ + '{http://schemas.xmlsoap.org/soap/envelope/}encodingStyle': + 'http://schemas.xmlsoap.org/soap/encoding/'}) + self.body = ET.SubElement(self.envelope, '{http://schemas.xmlsoap.org/soap/envelope/}Body') + + self.in_arguments = {} + self.out_arguments = {} + + for argument in xml.findall('./argumentList/argument', namespaces=self.namespaces): + name = argument.findtext('name', namespaces=self.namespaces) + direction = argument.findtext('direction', namespaces=self.namespaces) + + if direction == 'in': + self.in_arguments[name.replace('-', '_')] = name + + if direction == 'out': + self.out_arguments[name] = name.replace('-', '_') + + def __call__(self, **kwargs): + missing_arguments = self.in_arguments.keys() - kwargs.keys() + if missing_arguments: + raise TR064MissingArgumentException( + 'Missing argument(s) \'' + "', '".join(missing_arguments) + '\'') + + unknown_arguments = kwargs.keys() - 
self.in_arguments.keys() + if unknown_arguments: + raise TR064UnknownArgumentException( + 'Unknown argument(s) \'' + "', '".join(unknown_arguments) + '\'') + + # Add SOAP action to header + self.headers['soapaction'] = '"{}#{}"'.format(self.service_type, self.name) + ET.register_namespace('u', self.service_type) + + # Prepare body for request + self.body.clear() + action = ET.SubElement(self.body, '{{{}}}{}'.format(self.service_type, self.name)) + for key in kwargs: + arg = ET.SubElement(action, self.in_arguments[key]) + arg.text = str(kwargs[key]) + + # soap._InitChallenge(header) + data = ET.tostring(self.envelope, encoding='utf-8', xml_declaration=True).decode() + request = requests.post('{0}{1}'.format(self.base_url, self.control_url), + headers=self.headers, + auth=self.auth, + data=data, + verify=self.verify) + if request.status_code != 200: + try: + xml = ET.parse(BytesIO(request.content)) + except Exception: + return request.status_code + try: + error_code = int(xml.find(f".//{{{self.control_namespace['']}}}errorCode").text) + except Exception: + error_code = None + pass + # self.logger.debug(f"status_code={request.status_code}, error_code={error_code.text}") + return error_code if error_code is not None else request.status_code + + # Translate response and prepare dict + xml = ET.parse(BytesIO(request.content)) + response = AttributeDict() + for arg in list(xml.find('.//{{{}}}{}Response'.format(self.service_type, self.name))): + name = self.out_arguments[arg.tag] + response[name] = arg.text + return response diff --git a/avm/tr064/attribute_dict.py b/avm/tr064/attribute_dict.py new file mode 100644 index 000000000..f7baddd5a --- /dev/null +++ b/avm/tr064/attribute_dict.py @@ -0,0 +1,8 @@ +"""TR-064 attribute dict.""" + + +class AttributeDict(dict): + """Direct access dict entries like attributes.""" + + def __getattr__(self, name): + return self[name] diff --git a/avm/tr064/client.py b/avm/tr064/client.py new file mode 100644 index 000000000..0b5a9baad --- 
/dev/null +++ b/avm/tr064/client.py @@ -0,0 +1,53 @@ +"""TR-064 client""" +from io import BytesIO +import lxml.etree as ET +import requests +from requests.auth import HTTPDigestAuth + +from .config import TR064_DEVICE_NAMESPACE +from .config import IGD_DEVICE_NAMESPACE +from .exceptions import TR064UnknownDeviceException +from .device import Device + + +# pylint: disable=too-few-public-methods +class Client: + """TR-064 client. + + :param str username: Username with access to router. + :param str password: Passwort to access router. + :param str base_url: URL to router. + """ + + def __init__(self, username, password, base_url='https://192.168.178.1:49443', description_file='tr64desc.xml', verify: bool = False): + self.base_url = base_url + self.auth = HTTPDigestAuth(username, password) + + self.description_file = description_file + self.verify = verify + self.devices = {} + + self.namespaces = IGD_DEVICE_NAMESPACE if 'igd' in description_file else TR064_DEVICE_NAMESPACE + + def __getattr__(self, name): + if name not in self.devices: + self._fetch_devices(self.description_file) + + if name in self.devices: + return self.devices[name] + + raise TR064UnknownDeviceException(f"Requested Device Name {name!r} not available.") + + def _fetch_devices(self, description_file='tr64desc.xml'): + """Fetch device description.""" + request = requests.get('{0}/{1}'.format(self.base_url, description_file), verify=self.verify) + # request = requests.get(f'{self.base_url}/{description_file}', verify=self.verify) + + if request.status_code == 200: + xml = ET.parse(BytesIO(request.content)) + + for device in xml.findall('.//device', namespaces=self.namespaces): + name = device.findtext('deviceType', namespaces=self.namespaces).split(':')[-2] + + if name not in self.devices: + self.devices[name] = Device(device, self.auth, self.base_url, self.verify, self.description_file) diff --git a/avm/tr064/config.py b/avm/tr064/config.py new file mode 100644 index 000000000..76d59aeee --- 
/dev/null +++ b/avm/tr064/config.py @@ -0,0 +1,34 @@ +"""TR-064 configuration.""" + +__all__ = [ + 'TR064_DEVICE_NAMESPACE', + 'TR064_SERVICE_NAMESPACE', + 'TR064_CONTROL_NAMESPACE', + 'IGD_DEVICE_NAMESPACE', + 'IGD_SERVICE_NAMESPACE', + 'IGD_CONTROL_NAMESPACE' +] + +TR064_DEVICE_NAMESPACE = { + '': 'urn:dslforum-org:device-1-0' +} + +TR064_SERVICE_NAMESPACE = { + '': 'urn:dslforum-org:service-1-0' +} + +TR064_CONTROL_NAMESPACE = { + '': 'urn:dslforum-org:control-1-0' +} + +IGD_DEVICE_NAMESPACE = { + '': 'urn:schemas-upnp-org:device-1-0' +} + +IGD_SERVICE_NAMESPACE = { + '': 'urn:schemas-upnp-org:service-1-0' +} + +IGD_CONTROL_NAMESPACE = { + '': 'urn:schemas-upnp-org:service-1-0' +} diff --git a/avm/tr064/device.py b/avm/tr064/device.py new file mode 100644 index 000000000..8d826f2a3 --- /dev/null +++ b/avm/tr064/device.py @@ -0,0 +1,56 @@ +"""TR-064 device""" +from .config import TR064_DEVICE_NAMESPACE +from .config import IGD_DEVICE_NAMESPACE +from .exceptions import TR064UnknownServiceException +from .service import Service +from .service_list import ServiceList + + +# pylint: disable=too-few-public-methods +class Device: + """TR-064 device. + + :param lxml.etree.Element xml: + XML device element + :param HTTPBasicAuthHandler auth: + HTTPBasicAuthHandler object, e.g. HTTPDigestAuth + :param str base_url: + URL to router. 
+ """ + + def __init__(self, xml, auth, base_url, verify: bool = False, description_file='tr64desc.xml'): + self.services = {} + self.verify = verify + self.description_file = description_file + self.namespaces = IGD_DEVICE_NAMESPACE if 'igd' in description_file else TR064_DEVICE_NAMESPACE + + for service in xml.findall('./serviceList/service', namespaces=self.namespaces): + service_type = service.findtext('serviceType', namespaces=self.namespaces) + service_id = service.findtext('serviceId', namespaces=self.namespaces) + control_url = service.findtext('controlURL', namespaces=self.namespaces) + event_sub_url = service.findtext('eventSubURL', namespaces=self.namespaces) + scpdurl = service.findtext('SCPDURL', namespaces=self.namespaces) + + name = service_type.split(':')[-2].replace('-', '_') + if name not in self.services: + self.services[name] = ServiceList() + + self.services[name].append( + Service( + auth, + base_url, + service_type, + service_id, + scpdurl, + control_url, + event_sub_url, + self.verify, + self.description_file + ) + ) + + def __getattr__(self, name): + if name in self.services: + return self.services[name] + + raise TR064UnknownServiceException(f"Requested Service Name {name!r} not available.") diff --git a/avm/tr064/exceptions.py b/avm/tr064/exceptions.py new file mode 100644 index 000000000..9608afaaa --- /dev/null +++ b/avm/tr064/exceptions.py @@ -0,0 +1,38 @@ +"""TR-064 exceptions.""" + +__all__ = [ + 'TR064UnknownDeviceException', + 'TR064UnknownServiceException', + 'TR064UnknownServiceIndexException', + 'TR064UnknownActionException', + 'TR064UnknownArgumentException', + 'TR064MissingArgumentException' +] + + +class TR064Exception(Exception): + """TR-064 base exception.""" + + +class TR064UnknownDeviceException(Exception): + """TR-064 unknown device exception.""" + + +class TR064UnknownServiceException(Exception): + """TR-064 unknown service exception.""" + + +class TR064UnknownServiceIndexException(Exception): + """TR-064 unknown 
service index exception.""" + + +class TR064UnknownActionException(Exception): + """TR-064 unknown action exception.""" + + +class TR064UnknownArgumentException(Exception): + """TR-064 unknown argument exception.""" + + +class TR064MissingArgumentException(Exception): + """TR-064 missing argument exception.""" diff --git a/avm/tr064/service.py b/avm/tr064/service.py new file mode 100644 index 000000000..e84cce2f7 --- /dev/null +++ b/avm/tr064/service.py @@ -0,0 +1,58 @@ +"""TR-064 service""" +from io import BytesIO +import lxml.etree as ET +import requests + +from .action import Action +from .config import TR064_SERVICE_NAMESPACE +from .config import IGD_SERVICE_NAMESPACE +from .exceptions import TR064UnknownActionException + + +# pylint: disable=too-few-public-methods, too-many-instance-attributes +class Service: + """TR-064 service.""" + + # pylint: disable=too-many-arguments + def __init__(self, auth, base_url, service_type, service_id, scpdurl, control_url, event_sub_url, verify: bool = False, description_file='tr64desc.xml'): + self.auth = auth + self.base_url = base_url + self.service_type = service_type + self.service_id = service_id + self.scpdurl = scpdurl + self.control_url = control_url + self.event_sub_url = event_sub_url + self.actions = {} + self.verify = verify + self.description_file = description_file + self.namespaces = IGD_SERVICE_NAMESPACE if 'igd' in description_file else TR064_SERVICE_NAMESPACE + + def __getattr__(self, name): + if name not in self.actions: + self._fetch_actions(self.scpdurl) + + if name in self.actions: + return self.actions[name] + + raise TR064UnknownActionException(f"Requested Action Name {name!r} not available.") + + def _fetch_actions(self, scpdurl): + """Fetch action description.""" + request = requests.get('{0}{1}'.format(self.base_url, scpdurl), verify=self.verify) + if request.status_code == 200: + xml = ET.parse(BytesIO(request.content)) + + for action in xml.findall('./actionList/action', 
namespaces=self.namespaces): + name = action.findtext('name', namespaces=self.namespaces) + canonical_name = name.replace('-', '_') + self.actions[canonical_name] = Action( + action, + self.auth, + self.base_url, + name, + self.service_type, + self.service_id, + self.control_url, + self.verify, + self.description_file + ) diff --git a/avm/tr064/service_list.py b/avm/tr064/service_list.py new file mode 100644 index 000000000..ebe45b804 --- /dev/null +++ b/avm/tr064/service_list.py @@ -0,0 +1,17 @@ +""".""" +from .exceptions import TR064UnknownServiceIndexException + + +class ServiceList(list): + """Service list.""" + + def __getattr__(self, name): + """Direct access to first list entry if brackets omit.""" + return self[0].__getattr__(name) + + def __getitem__(self, index): + """Overriden bracket operator to return TR-064 exception.""" + if len(self) > index: + return super().__getitem__(index) + + raise TR064UnknownServiceIndexException("Exception in ServiceListIndex") diff --git a/avm/user_doc.rst b/avm/user_doc.rst old mode 100755 new mode 100644 index c7f82a7ad..1209f40ed --- a/avm/user_doc.rst +++ b/avm/user_doc.rst @@ -1,7 +1,18 @@ +.. index:: Plugins; avm +.. index:: avm + +=== avm === +.. image:: webif/static/img/plugin_logo.png + :alt: plugin logo + :width: 300px + :height: 300px + :scale: 50 % + :align: left + Allgemeine Informationen ------------------------ @@ -34,117 +45,43 @@ und Kennwort umgestellt werden" und es sollte ein eigener User für das AVM Plug Konfiguration des Plugins --------------------------- -Die Konfiguration des Plugins erfolgt über das Admin-Interface. -Dafür stehen die folgenden Einstellungen zur Verfügung: - -- `username`: Required login information -- `password`: Required login information -- `host`: Hostname or ip address of the FritzDevice. -- `port`: Port of the FritzDevice, typically 49433 for https or 49000 for http -- `cycle`: timeperiod between two update cycles. Default is 300 seconds. 
-- `ssl`: True or False => True will add "https", False "http" to the URLs in the plugin -- `verify`: True or False => Turns certificate verification on or off. Typically False -- `call_monitor`: True or False => Activates or deactivates the MonitoringService, which connects to the FritzDevice's call monitor -- `call_monitor_incoming_filter`: Filter only specific numbers to be watched by call monitor -- `avm_home_automation`: True or False => Activates or deactivates the AHA Interface to communicate with HomeAutomation Devices, -- `log_entry_count`: Number of Log-Messages, which will be displayed. -- `instance`: Unique identifier for each FritzDevice / each instance of the plugin - -Alternativ kann das Plugin auch manuell konfiguriert werden. - +Diese Plugin Parameter und die Informationen zur Item-spezifischen Konfiguration des Plugins sind +unter :doc:`/plugins_doc/config/avm` beschrieben. -.. code-block:: yaml - - fb1: - plugin_name: avm - username: ... # optional - password: '...' - host: fritz.box - port: 49443 - cycle: 300 - ssl: True - verify: False - call_monitor: 'True' - call_monitor_incoming_filter: "... ## optional, don't set if you don't want to watch only one specific number with your call monitor" - avm_home_automation: 'True' - instance: fritzbox_7490 - - fb2: - plugin_name: avm - username: ... # optional - password: '...' - host: '...' - port: 49443 - cycle: 300 - ssl: True - verify: False - call_monitor: 'True' - avm_home_automation: 'False' - instance: wlan_repeater_1750 - -.. note:: Kürzere Updatezyklen können abhängig vm Fritzdevice aufgrund von CPU Auslastung und damit zu Problemen (u.a. +.. note:: Kürzere Updatezyklen können abhängig vom Fritzdevice aufgrund hoher CPU Auslastung zu Problemen (u.a. zu Nichterreichbarkeit des Webservice) führen. Wird ein kürzerer Updatezyklus benötigt, sollte das shNG Log beobachtet werden. Dort werden entsprechende Fehlermeldungen hinterlegt. 
-Konfiguration des Items ------------------------ - -Zur Konfiguration der Items stehen folgende Parameter zur Verfügung: - -avm_data_type -~~~~~~~~~~~~~ -This attribute defines supported functions that can be set for an item. Full set see plugin.yaml. -For most items, the avm_data_type can be bound to an instance via @... . Only in some points the items -are parsed as child items. - -avm_incoming_allowed -~~~~~~~~~~~~~~~~~~~~ -Definition der erlaubten eingehenden Rufnummer in Items vom avm_data_type `monitor_trigger`.' - -avm_target_number -~~~~~~~~~~~~~~~~~ -Definition der erlaubten angerufenen Rufnummer in Items vom avm_data_type `monitor_trigger`.' - -avm_wlan_index -~~~~~~~~~~~~~~ -Definition des Wlans ueber index: (1: 2.4Ghz, 2: 5Ghz, 3: Gaeste).' - -avm_mac -~~~~~~~ -Definition der MAC Adresse für Items vom avm_data_type `network_device`. Nur für diese Items mandatory!' - -avm_ain -~~~ -Definition der Aktor Identifikationsnummer (AIN)Items für smarthome Items. Nur für diese Items mandatory!' - -avm_tam_index -~~~~~~~~~~~~~ -Index für den Anrufbeantworter, normalerweise für den ersten eine "1". Es werden bis zu 5 Anrufbeantworter vom Gerät -unterstützt.' - -avm_deflection_index -~~~~~~~~~~~~~~~~~~~~ -Index für die Rufumleitung, normalerweise für die erste eine "1".' 
- - item_structs ------------ Zur Vereinfachung der Einrichtung von Items sind für folgende Item-structs vordefiniert: -- ``info`` - General Information about Fritzbox -- ``monitor`` - Call Monitor -- ``tam`` - (für einen) Anrufbeantworter -- ``deflection`` - (für eine) Rufumleitung -- ``wan`` - WAN Items -- ``wlan`` - Wireless Lan Items -- ``device`` - Items eines verbundenen Gerätes -- ``aha_general`` - Allgemeine Informationen eines AVM HomeAutomation Devices -- ``aha_hkr`` - spezifische Informationen eines AVM HomeAutomation Thermostat Devices -- ``aha_temperatur_sensor`` - spezifische Informationen eines AVM HomeAutomation Devices mit Temperatursensor -- ``aha_alert`` - spezifische Informationen eines AVM HomeAutomation Devices mit Alarmfunktion -- ``aha_switch`` - spezifische Informationen eines AVM HomeAutomation Devices mit Schalter -- ``aha_powermeter`` - spezifische Informationen eines AVM HomeAutomation Devices mit Strommessung +Fritz!Box // Fritz!Repeater mit TR-064 + - ``info`` - Allgemeine Information zur Fritz!Box oder Fritz!Repeater + - ``monitor`` - Call Monitor (nur Fritz!Box) + - ``tam`` - Anrufbeantworter (nur Fritz!Box) + - ``deflection`` - Rufumleitung (nur Fritz!Box) + - ``wan`` - WAN Verbindung (nur Fritz!Box) + - ``wlan`` - WLAN Verbimdungen (Fritz!Box und Fritz!Repeater) + - ``device`` - Information zu einem bestimmten mit der Fritz!Box oder dem Fritz!Repeater verbundenen Netzwerkgerät (Fritz!Box und Fritz!Repeater) + + +Fritz!DECT mit AHA (FRITZ!DECT 100, FRITZ!DECT 200, FRITZ!DECT 210, FRITZ!DECT 300, FRITZ!DECT 440, FRITZ!DECT 500, Comet DECT) + - ``aha_general`` - Allgemeine Informationen eines AVM HomeAutomation Devices (alle) + - ``aha_thermostat`` - spezifische Informationen eines AVM HomeAutomation Thermostat Devices (thermostat) + - ``aha_temperature_sensor`` - spezifische Informationen eines AVM HomeAutomation Devices mit Temperatursensor (temperature_sensor) + - ``aha_humidity_sensor`` - spezifische Informationen eines AVM 
HomeAutomation Devices mit Feuchtigkeitssensor (bspw. FRITZ!DECT 440) (humidity_sensor) + - ``aha_alert`` - spezifische Informationen eines AVM HomeAutomation Devices mit Alarmfunktion (alarm) + - ``aha_switch`` - spezifische Informationen eines AVM HomeAutomation Devices mit Schalter (switch) + - ``aha_powermeter`` - spezifische Informationen eines AVM HomeAutomation Devices mit Strommessung (powermeter) + - ``aha_level`` - spezifische Informationen eines AVM HomeAutomation Devices mit Dimmfunktion oder Höhenverstellung (dimmable_device) + - ``aha_blind`` - spezifische Informationen eines AVM HomeAutomation Devices mit Blind / Rollo (blind) + - ``aha_on_off`` - spezifische Informationen eines AVM HomeAutomation Devices mit An/Aus (on_off_device) + - ``aha_button`` - spezifische Informationen eines AVM HomeAutomation Devices mit Button (bspw. FRITZ!DECT 440) (button) + - ``aha_color`` - spezifische Informationen eines AVM HomeAutomation Devices mit Color (bspw. FRITZ!DECT 500) (color_device) + +Welche Funktionen Euer spezifisches Gerät unterstützt, könnt ihr im WebIF im Reiter "AVM AHA Devices" im "Device Details (dict)" unter "device_functions" sehen. 
Item Beispiel mit Verwendung der structs ohne Instanz @@ -197,7 +134,7 @@ Item Beispiel mit Verwendung der structs ohne Instanz struct: - avm.aha_general - avm.aha_thermostat - - avm.aha_temperatur_sensor + - avm.aha_temperature_sensor Item Beispiel mit Verwendung der structs mit Instanz diff --git a/avm/user_doc/assets/webif_tab1.jpg b/avm/user_doc/assets/webif_tab1.jpg old mode 100755 new mode 100644 diff --git a/avm/user_doc/assets/webif_tab2.jpg b/avm/user_doc/assets/webif_tab2.jpg old mode 100755 new mode 100644 diff --git a/avm/user_doc/assets/webif_tab3.jpg b/avm/user_doc/assets/webif_tab3.jpg old mode 100755 new mode 100644 diff --git a/avm/user_doc/assets/webif_tab4.jpg b/avm/user_doc/assets/webif_tab4.jpg old mode 100755 new mode 100644 diff --git a/avm/user_doc/assets/webif_tab5.jpg b/avm/user_doc/assets/webif_tab5.jpg old mode 100755 new mode 100644 diff --git a/avm/user_doc/assets/webif_tab6.jpg b/avm/user_doc/assets/webif_tab6.jpg old mode 100755 new mode 100644 diff --git a/avm/webif/__init__.py b/avm/webif/__init__.py old mode 100755 new mode 100644 index 9d4221f33..2b0095786 --- a/avm/webif/__init__.py +++ b/avm/webif/__init__.py @@ -7,7 +7,7 @@ # https://www.smarthomeNG.de # https://knx-user-forum.de/forum/supportforen/smarthome-py # -# Part of AVM2 Plugin +# Part of AVM Plugin # # SmartHomeNG is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -61,14 +61,14 @@ def index(self, reload=None, action=None): """ if self.plugin.fritz_device: - tr064_items = self.plugin.fritz_device.item_list + tr064_items = self.plugin.fritz_device.item_list() tr064_item_count = len(tr064_items) else: tr064_items = None tr064_item_count = None if self.plugin.fritz_home: - aha_items = self.plugin.fritz_home.item_list + aha_items = self.plugin.fritz_home.item_list() aha_item_count = len(aha_items) logentries = self.plugin.get_device_log_from_lua_separated() else: @@ -77,7 +77,7 @@ def 
index(self, reload=None, action=None): logentries = None if self.plugin.monitoring_service: - call_monitor_items = self.plugin.monitoring_service.item_all_list + call_monitor_items = self.plugin.monitoring_service.item_list() call_monitor_item_count = len(call_monitor_items) else: call_monitor_items = None @@ -116,7 +116,7 @@ def get_data_html(self, dataSet=None): data = dict() if self.plugin.monitoring_service: data['call_monitor'] = {} - for item in self.plugin.monitoring_service.item_all_list: + for item in self.plugin.monitoring_service.item_list(): data['call_monitor'][item.id()] = {} data['call_monitor'][item.id()]['value'] = item() data['call_monitor'][item.id()]['last_update'] = item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') @@ -124,19 +124,21 @@ def get_data_html(self, dataSet=None): if self.plugin.fritz_device: data['tr064_items'] = {} - for item in self.plugin.fritz_device.item_list: + for item in self.plugin.fritz_device.item_list(): data['tr064_items'][item.id()] = {} data['tr064_items'][item.id()]['value'] = item() data['tr064_items'][item.id()]['last_update'] = item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') data['tr064_items'][item.id()]['last_change'] = item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') + data['tr064_items_blacklistet'] = self.plugin.fritz_device.get_tr064_items_blacklisted() if self.plugin.fritz_home: data['aha_items'] = {} - for item in self.plugin.fritz_home.item_list: + for item in self.plugin.fritz_home.item_list(): data['aha_items'][item.id()] = {} data['aha_items'][item.id()]['value'] = item() data['aha_items'][item.id()]['last_update'] = item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') data['aha_items'][item.id()]['last_change'] = item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') + data['aha_last_request'] = self.plugin.fritz_home.last_request data['maintenance'] = True if self.plugin.log_level <= 20 else False @@ -155,4 +157,4 @@ def reconnect(self): @cherrypy.expose def 
reset_item_blacklist(self): - self.plugin.get_fritz_device.reset_item_blacklist() + self.plugin.fritz_device.reset_item_blacklist() diff --git a/avm/webif/__pycache__/__init__.cpython-38.pyc b/avm/webif/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 000000000..442ed75ec Binary files /dev/null and b/avm/webif/__pycache__/__init__.cpython-38.pyc differ diff --git a/avm/webif/static/img/lamp_green.png b/avm/webif/static/img/lamp_green.png old mode 100755 new mode 100644 diff --git a/avm/webif/static/img/lamp_red.png b/avm/webif/static/img/lamp_red.png old mode 100755 new mode 100644 diff --git a/avm/webif/static/img/plugin_logo.png b/avm/webif/static/img/plugin_logo.png old mode 100755 new mode 100644 diff --git a/lirc/webif/static/img/readme.txt b/avm/webif/static/img/readme.txt old mode 100755 new mode 100644 similarity index 100% rename from lirc/webif/static/img/readme.txt rename to avm/webif/static/img/readme.txt diff --git a/avm/webif/templates/index.html b/avm/webif/templates/index.html old mode 100755 new mode 100644 index 041b80c3e..c71bd30f2 --- a/avm/webif/templates/index.html +++ b/avm/webif/templates/index.html @@ -1,7 +1,7 @@ {% extends "base_plugin.html" %} {% set logo_frame = false %} -{% if tr064_items %} - {% set update_interval = [(((10 * (tr064_items | length)) / 1000) | round | int) * 1000, 5000]|max %} +{% if tr064_item_count > 0 %} + {% set update_interval = [(((10 * (tr064_item_count)) / 1000) | round | int) * 1000, 5000]|max %} {% else %} {% set update_interval = 5 %} {% endif %} @@ -104,6 +104,11 @@ pageResize: resize } ); console.log("Init smarthome_devicetable for page length -1, pageResize: " + resize); + maintenance_table = $('#maintenance_table').DataTable( { + pageLength: -1, + pageResize: resize + } ); + console.log("Init maintenance_table for page length -1, pageResize: " + resize); } catch (e) { console.log("Datatable JS not loaded, showing standard table without reorder option " +e); @@ -131,6 +136,9 @@ 
shngInsertText (item+'_call_last_update', objResponse['call_monitor'][item]['last_update'], 'call_monitortable'); shngInsertText (item+'_call_last_change', objResponse['call_monitor'][item]['last_change'], 'call_monitortable'); } + shngInsertText ('tr064_blacklisted', objResponse['tr064_items_blacklistet'], 'maintenance_table', 5); + shngInsertText ('aha_last_request', objResponse['aha_last_request'], 'maintenance_table', 5); + } } @@ -145,20 +153,20 @@ {% set tabcount = 6 %} -{% if p._fritz_device and tr064_item_count > 0 %} +{% if p.fritz_device and tr064_item_count > 0 %} {% set tab1title = _(""'AVM TR-064 Items'" (" ~ tr064_item_count ~ ") ") %} {% else %} {% set tab2title = "hidden" %} {% endif %} -{% if p._fritz_home and aha_item_count > 0 %} +{% if p.fritz_home and aha_item_count > 0 %} {% set tab2title = _(""'AVM AHA Items'" (" ~ aha_item_count ~ ") ") %} {% else %} {% set tab2title = "hidden" %} {% endif %} -{% if p._fritz_home and len(p._fritz_home._aha_devices) > 0%} - {% set tab3title = _(""'AVM AHA Devices'" (" ~ len(p._fritz_home._aha_devices) ~ ") ") %} +{% if p.fritz_home and len(p.fritz_home._devices) > 0%} + {% set tab3title = _(""'AVM AHA Devices'" (" ~ len(p.fritz_home._devices) ~ ") ") %} {% else %} {% set tab3title = "hidden" %} {% endif %} @@ -191,7 +199,7 @@ - {% if p._fritz_device %} + {% if p.fritz_device %} {{ _('Gerät verfügbar') }} {% else %} {{ _('Gerät nicht verfügbar') }} @@ -199,8 +207,8 @@ {{ _('Verbunden') }} - {% if p._fritz_device %} - {{ _('Ja') }}{% if p._fritz_device.ssl %}, SSL{% endif %} + {% if p.fritz_device %} + {{ _('Ja') }}{% if p.fritz_device.ssl %}, SSL{% endif %} {% else %} {{ _('Nein') }} {% endif %} @@ -210,7 +218,7 @@ - {% if p._monitoring_service and p._monitoring_service._listen_active %} + {% if p.monitoring_service and p.monitoring_service._listen_active %} {{ _('Call Monitor verbunden') }} {% else %} {{ _('Call Monitor nicht verbunden') }} @@ -223,10 +231,20 @@ {{ 
p.get_parameter_value_for_display('password') }} - {{ _('Host') }} - {{ p._fritz_device.host }} + + {{ _('Host') }} + {% if p.fritz_device.is_fritzbox() %} + {{ _(' is Fritz!Box') }} + {% elif p.fritz_device.is_repeater() %} + {{ _(' is Fritz!Repeater') }} + {% else %} + {{ _(' -') }} + {% endif %} + + + {{ p.fritz_device.host }} {{ _('Port') }} - {{ p._fritz_device.port }} {% if p._fritz_device.ssl %}(HTTPS){% endif %} + {{ p.fritz_device.port }} {% if p.fritz_device.ssl %}(HTTPS){% endif %} @@ -242,184 +260,174 @@ {% block bodytab1 %} -
- - - - - - - - - - - - - - - {% if tr064_items %} - {% for item in tr064_items %} - - - - - - - - - - - {% endfor %} - {% endif %} - -
{{ _('Pfad') }}{{ _('Typ') }}{{ _('AVM Datentyp') }}{{ _('Cycle') }}{{ _('Wert') }}{{ _('Letztes Update') }}{{ _('Letzter Change') }}
{{ item.id() }}{{ item.property.type }}{{ p.fritz_device.item_dict[item][0] }}{{ p.fritz_device.item_dict[item][2] }}{{ item.property.value }}{{ item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') }}{{ item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') }}
-
+ + + + + + + + + + + + + + + {% if tr064_items %} + {% for item in tr064_items %} + {% set item_config = p.fritz_device.items[item] %} + + + + + + + + + + + {% endfor %} + {% endif %} + +
{{ _('Pfad') }}{{ _('Typ') }}{{ _('AVM Datentyp') }}{{ _('Cycle') }}{{ _('Wert') }}{{ _('Letztes Update') }}{{ _('Letzter Change') }}
{{ item.id() }}{{ item.property.type }}{{ item_config[0] }}{{ item_config[2] }}{{ item.property.value }}{{ item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') }}{{ item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') }}
{% endblock %} {% block bodytab2 %} -
- - - - - - - - - - - - - - - {% if aha_items %} - {% for item in aha_items %} - - - - - - - - - - - {% endfor %} - {% endif %} - -
{{ _('Pfad') }}{{ _('Typ') }}{{ _('AVM Datentyp') }}{{ _('Cycle') }}{{ _('Wert') }}{{ _('Letztes Update') }}{{ _('Letzter Change') }}
{{ item.id() }}{{ item.property.type }}{{ p.fritz_home.item_dict[item][0] }}{{ p.fritz_home.item_dict[item][2] }}{{ item.property.value }}{{ item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') }}{{ item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') }}
-
+ + + + + + + + + + + + + + + {% if aha_items %} + {% for item in aha_items %} + {% set item_config = p.fritz_home.items[item] %} + + + + + + + + + + + {% endfor %} + {% endif %} + +
{{ _('Pfad') }}{{ _('Typ') }}{{ _('AVM Datentyp') }}{{ _('Cycle') }}{{ _('Wert') }}{{ _('Letztes Update') }}{{ _('Letzter Change') }}
{{ item.id() }}{{ item.property.type }}{{ item_config[0] }}{{ item_config[2] }}{{ item.property.value }}{{ item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') }}{{ item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') }}
{% endblock %} {% block bodytab3 %} -
- - - - - - - - - - - - {% if p._fritz_home %} - {% for ain in p._fritz_home._aha_devices %} - - - - - - - {% endfor %} - {% endif %} - -
{{ 'Device AIN' }}{{ '' }}{{ 'Device Details (dict)' }}
{{ ain }} - {{ p._fritz_home._aha_devices[ain] }}
-
+ + + + + + + + + + + + {% if p.fritz_home %} + {% set devices = p.fritz_home.get_devices() %} + {% for device in devices %} + + + + + + {% endfor %} + {% endif %} + +
{{ 'Device AIN' }}{{ '' }}{{ 'Device Details (dict)' }}
{{ device.ain }} + {{ p.fritz_home.get_device_by_ain(device.ain).__dict__ }}
{% endblock %} {% block bodytab4 %} -
- - - - - - - - - - - - - - {% if call_monitor_items %} - {% for item in call_monitor_items %} - {% set item_id = item.id() %} - {% if p.get_instance_name() %} - {% set instance_key = "avm_data_type@"+p.get_instance_name() %} - {% else %} - {% set instance_key = "avm_data_type" %} - {% endif %} - - - - - - - - - - {% endfor %} - {% endif %} - -
{{ _('Pfad') }}{{ _('Typ') }}{{ _('AVM Datentyp') }}{{ _('Wert') }}{{ _('Letztes Update') }}{{ _('Letzter Change') }}
{{ item_id }}{{ item.property.type }}{{ item.conf[instance_key]}}{{ item.property.value }}{{ item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') }}{{ item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') }}
-
+ + + + + + + + + + + + + + {% if call_monitor_items %} + {% for item in call_monitor_items %} + {% set item_id = item.id() %} + {% if p.get_instance_name() %} + {% set instance_key = "avm_data_type@"+p.get_instance_name() %} + {% else %} + {% set instance_key = "avm_data_type" %} + {% endif %} + + + + + + + + + + {% endfor %} + {% endif %} + +
{{ _('Pfad') }}{{ _('Typ') }}{{ _('AVM Datentyp') }}{{ _('Wert') }}{{ _('Letztes Update') }}{{ _('Letzter Change') }}
{{ item_id }}{{ item.property.type }}{{ item.conf[instance_key]}}{{ item.property.value }}{{ item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') }}{{ item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') }}
{% endblock %} {% block bodytab5 %} -
- - - - - - - - - - - - {% if logentries %} - {% for logentry in logentries%} - - - - - - - - {% endfor %} - {% endif %} - -
{{ 'Datum/Uhrzeit' }}{{ 'Meldung' }}{{ 'Typ' }}{{ 'Kategorie' }}
{{ logentry[0] }}{{ logentry[1] }} - - {{ logentry[2] }} - - {{ _('cat_'+logentry[3]|string) }}
- -
+ + + + + + + + + + + + {% if logentries %} + {% for logentry in logentries%} + + + + + + + + {% endfor %} + {% endif %} + +
{{ 'Datum/Uhrzeit' }}{{ 'Meldung' }}{{ 'Typ' }}{{ 'Kategorie' }}
{{ logentry[0] }}{{ logentry[1] }} + + {{ logentry[2] }} + + {{ _('cat_'+logentry[3]|string) }}
{% endblock %} {% block bodytab6 %} -
{% if not maintenance %} {% for function, dict in p.metadata.plugin_functions.items() %}
@@ -456,46 +464,53 @@ {% endif %} {% if maintenance %} - - - - - - - - - - - - - - - - - - - - - - - - - - -
{{ 'Befehl' }}{{ 'Ergebnis' }}
{{ "fritz_device._items" }}{{ p.fritz_device._items }}
{{ "fritz_device._item_blacklist" }}{{ p.fritz_device._item_blacklist }}
{{ "_fritz_home._items" }}{{ p.fritz_home._items }}
{{ "self._data_cache" }}{{ p._fritz_device._data_cache }}
+ + + + + + + + + + + {% if p.fritz_device %} + + + + + + + + + + + {% endif %} + {% if p.fritz_home %} + + + + + + + + + + + {% endif %} + {% if p.monitoring_service %} + + + + + + + + + + + {% endif %} + +
{{ "01 TR064 Items @ Fritz_Device" }}{{ p.fritz_device.items }}
{{ "02 TR064 Blacklisted Items @ Fritz_Device " }}{{ p.fritz_device.get_tr064_items_blacklisted() }}
{{ "03 AHA Items @ Fritz_Home" }}{{ p.fritz_home.items }}
{{ " 04 AHA LastRequest XML" }}{{ p.fritz_home.last_request | string }}
{{ " 05 CM Items @ CallMonitor Incoming" }}{{ p.monitoring_service.item_list_incoming() }}
{{ " 06 CM Items @ CallMonitor Outgoing" }}{{ p.monitoring_service.item_list_outgoing() }}
{% endif %} -
{% endblock %} diff --git a/bose_soundtouch/README.md b/bose_soundtouch/README.md index ea4ebc886..c4747e611 100755 --- a/bose_soundtouch/README.md +++ b/bose_soundtouch/README.md @@ -43,8 +43,6 @@ The plugin can be configured with the following parameters: | Parameter | Description | Required | ------------- | ------------- | ------------- | -| class_name | Must be set to `BoseSoundtouch` | Yes | -| class_path | Must be set to `plugins.bose_soundtouch` | Yes | | ip | IP address of Bose Soundtouch system. e.g. `192.168.2.28` | Yes | | port | Port of Bose Soundtouch system. e.g. `8090` | - | | cycle_time | Bose Soundtouch system will we queried every X seconds. e.g. `10` | - | @@ -53,8 +51,7 @@ The following example can be used to setup a device: ```yaml bose_soundtouch: - class_name: BoseSoundtouch - class_path: plugins.bose_soundtouch + plugin_name: bose_soundtouch ip: 192.168.2.28 ``` diff --git a/buderus/README.md b/buderus/README.md index 4b6352ff3..bc0132969 100755 --- a/buderus/README.md +++ b/buderus/README.md @@ -39,8 +39,6 @@ The plugin can be configured with the following parameters: | Parameter | Description | Required | ------------- | ------------- | ------------- | -| class_name | Must be set to `Buderus` | Yes | -| class_path | Must be set to `plugins.buderus` | Yes | | host | IP address of the KM200 gateway. e.g. `192.168.2.28` | Yes | | key | Security key which must be created beforehand from your device password (printed on the KM200) and your user defined password (set in the EasyControl App): https://ssl-account.com/km200.andreashahn.info/ | Yes | | cycle_time | Information will be fetched from KM200 every X seconds. Defaults to 900, meaning an update will be pulled every 15 minutes. 
| - | @@ -49,8 +47,7 @@ The following example can be used to setup a device: ```yaml buderus: - class_name: Buderus - class_path: plugins.buderus + plugin_name: buderus host: 192.168.2.28 key: 90ad52660ce1234551234579d89e25b70b5331ce0e82c5fd1254a317574ec807 ``` diff --git a/cli/README.md b/cli/README.md index ea0f7a853..e797cac3a 100755 --- a/cli/README.md +++ b/cli/README.md @@ -6,8 +6,7 @@ ``` cli: - class_name: CLI - class_path: plugins.cli + plugin_name: cli # ip = 127.0.0.1 # port = 2323 # update = false diff --git a/co2meter/README.md b/co2meter/README.md index d6d3453a8..dc01ee83a 100755 --- a/co2meter/README.md +++ b/co2meter/README.md @@ -18,8 +18,7 @@ The code was adapted from the CO2Meter project Copyright 2017 by Michael Heinema ### plugin.yaml ```yaml co2meter: - class_name: CO2Meter - class_path: plugins.co2meter + plugin_name: co2meter device: '/dev/hidraw0' time_sleep: 5 ``` diff --git a/comfoair/README.md b/comfoair/README.md index 64678f799..2c7255805 100755 --- a/comfoair/README.md +++ b/comfoair/README.md @@ -23,8 +23,7 @@ This plugin has no requirements or dependencies. 
``` comfoair: - class_name: ComfoAir - class_path: plugins.comfoair + plugin_name: comfoair kwltype: comfoair350 # Currently supported: comfoair350 and comfoair500 host: 192.168.123.6 # Provide host and port if you want to use TCP connection (for a TCP to serial converter) port: 5555 # Port diff --git a/dashbutton/README.md b/dashbutton/README.md index 170de4101..0e94f927e 100755 --- a/dashbutton/README.md +++ b/dashbutton/README.md @@ -1,4 +1,4 @@ -# Amazon Dashbutton Plugin +# Dashbutton Plugin ## Setup your Amazon Dashbutton @@ -44,8 +44,7 @@ Activate plugin via plugin.yaml: ```yaml dashbutton: - class_name: Dashbutton - class_path: plugins.dashbutton + plugin_name: dashbutton ``` ### Item attributes diff --git a/database/__init__.py b/database/__init__.py index c2981012c..7a79905eb 100755 --- a/database/__init__.py +++ b/database/__init__.py @@ -25,6 +25,7 @@ import copy import re +import os import datetime import functools import time @@ -1656,11 +1657,14 @@ def _initialize_db(self): {i: [self._prepare(query[0]), self._prepare(query[1])] for i, query in self._setup.items()}) self._db_initialized = True except Exception as e: - self.logger.critical("Database: Initialization failed: {}".format(e)) if self.driver.lower() == 'sqlite3': + self.logger.critical(f"Database: Initialization failed: {e}") + self.logger.error(f" - connection string={self._connect}") + self.logger.error(f" - working directory={os.getcwd()}") self._sh.restart('SmartHomeNG (Database plugin stalled)') exit(0) else: + self.logger.critical(f"Database: Initialization failed: {e}") return False # initialize db maintenance connection diff --git a/datalog/README.md b/datalog/README.md index 157243e9d..12cbf395f 100755 --- a/datalog/README.md +++ b/datalog/README.md @@ -18,8 +18,7 @@ The plugin can be configured using the following settings: ``` datalog: - class_name: DataLog - class_path: plugins.datalog + plugin_name: datalog # path: var/log/data # filepatterns: # - 
default:{log}-{year}-{month}-{day}.csv @@ -56,8 +55,7 @@ Example: ```yaml datalog: - class_name = DataLog - class_path = plugins.datalog + plugin_name = datalog filepatterns = default:{log}-{year}-{month}-{day}.csv | custom:{log}-{year}-{month}-{day}.txt logpatterns = csv:{time};{item};{value}\n ``` diff --git a/db_addon/__init__.py b/db_addon/__init__.py index 82d733f47..a3fa2bf73 100644 --- a/db_addon/__init__.py +++ b/db_addon/__init__.py @@ -25,14 +25,6 @@ # ######################################################################### -from lib.model.smartplugin import SmartPlugin -from lib.item import Items -from lib.item.item import Item -from lib.shtime import Shtime -from lib.plugin import Plugins -from .webif import WebInterface -import lib.db - import sqlvalidator import datetime import time @@ -42,6 +34,14 @@ from typing import Union import threading +from lib.model.smartplugin import SmartPlugin +from lib.item import Items +from lib.item.item import Item +from lib.shtime import Shtime +from lib.plugin import Plugins +from .webif import WebInterface +import lib.db + DAY = 'day' WEEK = 'week' MONTH = 'month' @@ -53,7 +53,7 @@ class DatabaseAddOn(SmartPlugin): Main class of the Plugin. 
Does all plugin specific stuff and provides the update functions for the items """ - PLUGIN_VERSION = '1.0.0' + PLUGIN_VERSION = '1.1.0' def __init__(self, sh): """ @@ -68,12 +68,12 @@ def __init__(self, sh): self.items = Items.get_instance() self.plugins = Plugins.get_instance() - # define properties // cache dicts + # define cache dicts self.current_values = {} # Dict to hold min and max value of current day / week / month / year for items self.previous_values = {} # Dict to hold value of end of last day / week / month / year for items self.item_cache = {} # Dict to hold item_id, oldest_log_ts and oldest_entry for items - # define properties // database, database connection, working queue and status + # define variables for database, database connection, working queue and status self.item_queue = queue.Queue() # Queue containing all to be executed items self.work_item_queue_thread = None # Working Thread for queue self._db_plugin = None # object if database plugin @@ -86,20 +86,20 @@ def __init__(self, sh): self.alive = None # Is plugin alive? 
self.startup_finished = False # Startup of Plugin finished self.suspended = False # Is plugin activity suspended - self._active_queue_item = '-' # String holding item path of currently executed item - - # define properties // Debugs + self.active_queue_item: str = '-' # String holding item path of currently executed item + + # define debug logs self.parse_debug = False # Enable / Disable debug logging for method 'parse item' self.execute_debug = False # Enable / Disable debug logging for method 'execute items' self.sql_debug = False # Enable / Disable debug logging for sql stuff self.onchange_debug = False # Enable / Disable debug logging for method 'handle_onchange' self.prepare_debug = False # Enable / Disable debug logging for query preparation - - # define properties // default mysql settings + + # define default mysql settings self.default_connect_timeout = 60 self.default_net_read_timeout = 60 - - # define properties // plugin parameters + + # define variables from plugin parameters self.db_configname = self.get_parameter_value('database_plugin_config') self.startup_run_delay = self.get_parameter_value('startup_run_delay') self.ignore_0 = self.get_parameter_value('ignore_0') @@ -154,6 +154,9 @@ def run(self): self.logger.info(f"Set scheduler for calculating startup-items with delay of {self.startup_run_delay + 3}s to {dt}.") self.scheduler_add('startup', self.execute_startup_items, next=dt) + # update database_items in item config, where path was given + self._update_database_items() + # set plugin to alive self.alive = True @@ -189,40 +192,39 @@ def get_database_item() -> Item: Returns item from shNG config which is an item with database attribut valid for current db_addon item """ - _lookup_item = item + _lookup_item = item.return_parent() - for i in range(3): + for i in range(2): if self.has_iattr(_lookup_item.conf, self.item_attribute_search_str): + self.logger.debug(f"Attribut '{self.item_attribute_search_str}' has been found for item={item.path()} {i + 
1} level above item.") return _lookup_item else: - self.logger.debug(f"Attribut '{self.item_attribute_search_str}' has not been found for item={item.path()} {i + 1} level above item.") _lookup_item = _lookup_item.return_parent() - def get_db_addon_item() -> bool: - """ - Returns item from shNG config which is item with db_addon attribut valid for database item - - """ + def has_db_addon_item() -> bool: + """Returns item from shNG config which is item with db_addon attribut valid for database item""" for child in item.return_children(): - if _check_db_addon_fct(child): + if check_db_addon_fct(child): return True for child_child in child.return_children(): - if _check_db_addon_fct(child_child): + if check_db_addon_fct(child_child): return True for child_child_child in child_child.return_children(): - if _check_db_addon_fct(child_child_child): + if check_db_addon_fct(child_child_child): return True return False - def _check_db_addon_fct(check_item) -> bool: + def check_db_addon_fct(check_item) -> bool: + """ + Check if item has db_addon_fct and is onchange + """ if self.has_iattr(check_item.conf, 'db_addon_fct'): - __db_addon_fct = self.get_iattr_value(check_item.conf, 'db_addon_fct').lower() - if onchange_attribute(__db_addon_fct): - self.logger.debug(f"db_addon item for database item {item.id()} found.") + if self.get_iattr_value(check_item.conf, 'db_addon_fct').lower() in ALL_ONCHANGE_ATTRIBUTES: + self.logger.debug(f"db_addon item for database item {item.path()} found.") return True return False @@ -230,153 +232,170 @@ def _check_db_addon_fct(check_item) -> bool: if self.has_iattr(item.conf, 'db_addon_fct'): if self.parse_debug: - self.logger.debug(f"parse item: {item.id()} due to 'db_addon_fct'") + self.logger.debug(f"parse item: {item.path()} due to 'db_addon_fct'") - # get attribute value - _db_addon_fct = self.get_iattr_value(item.conf, 'db_addon_fct').lower() + # get db_addon_fct attribute value + db_addon_fct = self.get_iattr_value(item.conf, 
'db_addon_fct').lower() - # get attribute if item should be calculated at plugin startup - _db_addon_startup = self.get_iattr_value(item.conf, 'db_addon_startup') + # get attribute value if item should be calculated at plugin startup + db_addon_startup = bool(self.get_iattr_value(item.conf, 'db_addon_startup')) # get attribute if certain value should be ignored at db query if self.has_iattr(item.conf, 'database_ignore_value'): - _db_addon_ignore_value = self.get_iattr_value(item.conf, 'database_ignore_value') + db_addon_ignore_value = self.get_iattr_value(item.conf, 'database_ignore_value') elif any(x in str(item.id()) for x in self.ignore_0): - _db_addon_ignore_value = 0 + db_addon_ignore_value = 0 else: - _db_addon_ignore_value = None + db_addon_ignore_value = None - # get database item - _database_item = get_database_item() + # get database item and return if not available + database_item_path = self.get_iattr_value(item.conf, 'db_addon_database_item') + if database_item_path is not None: + database_item = database_item_path + else: + database_item = get_database_item() + if database_item is None: + self.logger.warning(f"No database item found for {item.path()}: Item ignored. Maybe you should check instance of database plugin.") + return - # return if no database_item - if _database_item is None: - self.logger.warning(f"No database item found for {item.id()}: Item ignored. Maybe you should check instance of database plugin.") - return + # return if mandatory params for ad_addon_fct not given. + if db_addon_fct in ALL_NEED_PARAMS_ATTRIBUTES and not self.has_iattr(item.conf, 'db_addon_params'): + self.logger.warning(f"Item '{item.path()}' with db_addon_fct={db_addon_fct} ignored, since parameter using 'db_addon_params' not given. 
Item will be ignored.") + return - # create items configs - item_config_data_dict = {'db_addon': 'function', 'attribute': _db_addon_fct, 'database_item': _database_item, 'ignore_value': _db_addon_ignore_value} - _update_cycle = None + # create standard items config + item_config_data_dict = {'db_addon': 'function', 'db_addon_fct': db_addon_fct, 'database_item': database_item, 'ignore_value': db_addon_ignore_value} + if database_item_path is not None: + item_config_data_dict.update({'database_item_path': True}) + else: + database_item_path = database_item.path() if self.parse_debug: - self.logger.debug(f"Item '{item.id()}' added with db_addon_fct={_db_addon_fct} and database_item={_database_item.id()}") + self.logger.debug(f"Item '{item.path()}' added with db_addon_fct={db_addon_fct} and database_item={database_item_path}") - # handle items with for daily run - if daily_attribute(_db_addon_fct): - _update_cycle = 'daily' + # handle daily items + if db_addon_fct in ALL_DAILY_ATTRIBUTES: + item_config_data_dict.update({'cycle': 'daily'}) - # handle items for weekly - elif weekly_attribute(_db_addon_fct): - _update_cycle = 'weekly' + # handle weekly items + elif db_addon_fct in ALL_WEEKLY_ATTRIBUTES: + item_config_data_dict.update({'cycle': 'weekly'}) - # handle items for monthly run - elif monthly_attribute(_db_addon_fct): - _update_cycle = 'monthly' + # handle monthly items + elif db_addon_fct in ALL_MONTHLY_ATTRIBUTES: + item_config_data_dict.update({'cycle': 'monthly'}) - # handle items for yearly run - elif yearly_attribute(_db_addon_fct): - _update_cycle = 'yearly' + # handle yearly items + elif db_addon_fct in ALL_YEARLY_ATTRIBUTES: + item_config_data_dict.update({'cycle': 'yearly'}) - # handle static items starting with 'general_' - elif _db_addon_fct.startswith('general_'): - _update_cycle = 'static' + # handle static items + elif db_addon_fct in ALL_GEN_ATTRIBUTES: + item_config_data_dict.update({'cycle': 'static'}) - # handle all functions with 'summe' like 
waermesumme, kaeltesumme, gruenlandtemperatursumme - elif 'summe' in _db_addon_fct: - if not self.has_iattr(item.conf, 'db_addon_params'): - self.logger.warning(f"Item '{item.id()}' with db_addon_fct={_db_addon_fct} ignored, since parameter using 'db_addon_params' not given. Item will be ignored.") - return + # handle on-change items + elif db_addon_fct in ALL_ONCHANGE_ATTRIBUTES: + item_config_data_dict.update({'cycle': 'on-change'}) - _db_addon_params = params_to_dict(self.get_iattr_value(item.conf, 'db_addon_params')) - if _db_addon_params is None or 'year' not in _db_addon_params: - self.logger.info(f"No 'year' for evaluation via 'db_addon_params' of item {item.id()} for function {_db_addon_fct} given. Default with 'current year' will be used.") - _db_addon_params = {} if _db_addon_params is None else _db_addon_params - _db_addon_params.update({'year': 'current'}) - - item_config_data_dict.update({'params': _db_addon_params}) - _update_cycle = 'daily' + # handle all functions with 'summe' like waermesumme, kaeltesumme, gruenlandtemperatursumme + if 'summe' in db_addon_fct: + db_addon_params = params_to_dict(self.get_iattr_value(item.conf, 'db_addon_params')) + if db_addon_params is None or 'year' not in db_addon_params: + self.logger.info(f"No 'year' for evaluation via 'db_addon_params' of item {item.path()} for function {db_addon_fct} given. Default with 'current year' will be used.") + db_addon_params = {'year': 'current'} + item_config_data_dict.update({'params': db_addon_params}) + + # handle wachstumsgradtage function + elif db_addon_fct == 'wachstumsgradtage': + DEFAULT_THRESHOLD = 10 + db_addon_params = params_to_dict(self.get_iattr_value(item.conf, 'db_addon_params')) + if db_addon_params is None or 'year' not in db_addon_params: + self.logger.info(f"No 'year' for evaluation via 'db_addon_params' of item {item.path()} for function {db_addon_fct} given. 
Default with 'current year' will be used.") + db_addon_params = {'year': 'current'} + if 'threshold' not in db_addon_params: + self.logger.info(f"No 'threshold' for evaluation via 'db_addon_params' of item {item.path()} for function {db_addon_fct} given. Default with {DEFAULT_THRESHOLD} will be used.") + db_addon_params.update({'threshold': DEFAULT_THRESHOLD}) + if not isinstance(db_addon_params['threshold'], int): + threshold = to_int(db_addon_params['threshold']) + db_addon_params['threshold'] = DEFAULT_THRESHOLD if threshold is None else threshold + item_config_data_dict.update({'params': db_addon_params}) # handle tagesmitteltemperatur - elif _db_addon_fct == 'tagesmitteltemperatur': + elif db_addon_fct == 'tagesmitteltemperatur': if not self.has_iattr(item.conf, 'db_addon_params'): - self.logger.warning(f"Item '{item.id()}' with db_addon_fct={_db_addon_fct} ignored, since parameter using 'db_addon_params' not given. Item will be ignored.") + self.logger.warning(f"Item '{item.path()}' with db_addon_fct={db_addon_fct} ignored, since parameter using 'db_addon_params' not given. Item will be ignored.") return - _db_addon_params = params_to_dict(self.get_iattr_value(item.conf, 'db_addon_params')) - item_config_data_dict.update({'params': _db_addon_params}) - _update_cycle = 'daily' + db_addon_params = params_to_dict(self.get_iattr_value(item.conf, 'db_addon_params')) + if db_addon_params is None: + self.logger.warning(f"Error occurred during parsing of item attribute 'db_addon_params' of item {item.path()}. Item will be ignored.") + return + item_config_data_dict.update({'params': db_addon_params}) # handle db_request - elif _db_addon_fct == 'db_request': + elif db_addon_fct == 'db_request': if not self.has_iattr(item.conf, 'db_addon_params'): - self.logger.warning(f"Item '{item.id()}' with db_addon_fct={_db_addon_fct} ignored, since parameter using 'db_addon_params' not given. 
Item will be ignored") + self.logger.warning(f"Item '{item.path()}' with db_addon_fct={db_addon_fct} ignored, since parameter using 'db_addon_params' not given. Item will be ignored") return - _db_addon_params = self.get_iattr_value(item.conf, 'db_addon_params') - _db_addon_params = params_to_dict(_db_addon_params) - if _db_addon_params is None: - self.logger.warning(f"Error occurred during parsing of item attribute 'db_addon_params' of item {item.id()}. Item will be ignored.") + db_addon_params = params_to_dict(self.get_iattr_value(item.conf, 'db_addon_params')) + if db_addon_params is None: + self.logger.warning(f"Error occurred during parsing of item attribute 'db_addon_params' of item {item.path()}. Item will be ignored.") return if self.parse_debug: - self.logger.debug(f"parse_item: {_db_addon_fct=} for item={item.id()}, {_db_addon_params=}") + self.logger.debug(f"parse_item: {db_addon_fct=} for item={item.path()}, {db_addon_params=}") - if not any(param in _db_addon_params for param in ('func', 'timeframe')): - self.logger.warning(f"Item '{item.id()}' with {_db_addon_fct=} ignored, not all mandatory parameters in {_db_addon_params=} given. Item will be ignored.") + if not any(param in db_addon_params for param in ('func', 'timeframe')): + self.logger.warning(f"Item '{item.path()}' with {db_addon_fct=} ignored, not all mandatory parameters in {db_addon_params=} given. 
Item will be ignored.") return - item_config_data_dict.update({'params': _db_addon_params}) - _timeframe = _db_addon_params.get('group', None) + TIMEFRAMES_2_UPDATECYCLE = {'day': 'daily', + 'week': 'weekly', + 'month': 'monthly', + 'year': 'yearly'} + + _timeframe = db_addon_params.get('group', None) if not _timeframe: - _timeframe = _db_addon_params.get('timeframe', None) - if _timeframe == 'day': - _update_cycle = 'daily' - elif _timeframe == 'week': - _update_cycle = 'weekly' - elif _timeframe == 'month': - _update_cycle = 'monthly' - elif _timeframe == 'year': - _update_cycle = 'yearly' - else: - self.logger.warning(f"Item '{item.id()}' with {_db_addon_fct=} ignored. Not able to detect update cycle.") + _timeframe = db_addon_params.get('timeframe', None) + update_cycle = TIMEFRAMES_2_UPDATECYCLE.get(_timeframe) + if update_cycle is None: + self.logger.warning(f"Item '{item.path()}' with {db_addon_fct=} ignored. Not able to detect update cycle.") + return - # handle on_change items - elif onchange_attribute(_db_addon_fct): - _update_cycle = 'on-change' + item_config_data_dict.update({'params': db_addon_params, 'cycle': update_cycle}) # debug log item cycle if self.parse_debug: - self.logger.debug(f"Item '{item.id()}' added to be run {_update_cycle}.") + self.logger.debug(f"Item '{item.path()}' added to be run {item_config_data_dict['cycle']}.") - # add item to be run on startup (onchange_items shall not be run at startup, but at first noticed change of item value; therefore remove for list of items to be run at startup) - if (_db_addon_startup and not onchange_attribute(_db_addon_fct)) or (_db_addon_fct.startswith('general_')): + # handle item to be run on startup (onchange_items shall not be run at startup, but at first noticed change of item value; therefore remove for list of items to be run at startup) + if (db_addon_startup and db_addon_fct not in ALL_ONCHANGE_ATTRIBUTES) or db_addon_fct in ALL_GEN_ATTRIBUTES: if self.parse_debug: - 
self.logger.debug(f"Item '{item.id()}' added to be run on startup") + self.logger.debug(f"Item '{item.path()}' added to be run on startup") item_config_data_dict.update({'startup': True}) else: item_config_data_dict.update({'startup': False}) # add item to plugin item dict self.add_item(item, config_data_dict=item_config_data_dict) - item_config = self.get_item_config(item) - item_config.update({'cycle': _update_cycle}) # handle all items with db_addon_info elif self.has_iattr(item.conf, 'db_addon_info'): if self.parse_debug: - self.logger.debug(f"parse item: {item.id()} due to used item attribute 'db_addon_info'") - self.add_item(item, config_data_dict={'db_addon': 'info', 'attribute': f"info_{self.get_iattr_value(item.conf, 'db_addon_info').lower()}", 'startup': True}) + self.logger.debug(f"parse item: {item.path()} due to used item attribute 'db_addon_info'") + self.add_item(item, config_data_dict={'db_addon': 'info', 'db_addon_fct': f"info_{self.get_iattr_value(item.conf, 'db_addon_info').lower()}", 'database_item': None, 'startup': True}) # handle all items with db_addon_admin elif self.has_iattr(item.conf, 'db_addon_admin'): if self.parse_debug: - self.logger.debug(f"parse item: {item.id()} due to used item attribute 'db_addon_admin'") - self.add_item(item, config_data_dict={'db_addon': 'admin', 'attribute': f"admin_{self.get_iattr_value(item.conf, 'db_addon_admin').lower()}"}) + self.logger.debug(f"parse item: {item.path()} due to used item attribute 'db_addon_admin'") + self.add_item(item, config_data_dict={'db_addon': 'admin', 'db_addon_fct': f"admin_{self.get_iattr_value(item.conf, 'db_addon_admin').lower()}", 'database_item': None}) return self.update_item # Reference to 'update_item' für alle Items mit Attribut 'database', um die on_change Items zu berechnen - elif self.has_iattr(item.conf, self.item_attribute_search_str) and get_db_addon_item(): - self.logger.debug(f"reference to update_item for item '{item}' will be set due to on-change") + elif 
self.has_iattr(item.conf, self.item_attribute_search_str) and has_db_addon_item(): + self.logger.debug(f"reference to update_item for item '{item.path()}' will be set due to on-change") self.add_item(item, config_data_dict={'db_addon': 'database'}) return self.update_item @@ -394,14 +413,14 @@ def update_item(self, item, caller=None, source=None, dest=None): if self.alive and caller != self.get_shortname(): # handle database items - if item in self._database_items: + if item in self._database_items(): # self.logger.debug(f"update_item was called with item {item.property.path} with value {item()} from caller {caller}, source {source} and dest {dest}") if not self.startup_finished: self.logger.info(f"Handling of 'on-change' is paused for startup. No updated will be processed.") elif self.suspended: self.logger.info(f"Plugin is suspended. No updated will be processed.") else: - self.logger.info(f"+ Updated item '{item.id()}' with value {item()} will be put to queue for processing. {self.item_queue.qsize() + 1} items to do.") + self.logger.info(f"+ Updated item '{item.path()}' with value {item()} will be put to queue for processing. {self.item_queue.qsize() + 1} items to do.") self.item_queue.put((item, item())) # handle admin items @@ -439,8 +458,8 @@ def execute_startup_items(self) -> None: self.logger.debug("execute_startup_items called") if not self.suspended: - self.logger.info(f"{len(self._startup_items)} items will be calculated at startup.") - [self.item_queue.put(i) for i in self._startup_items] + self.logger.info(f"{len(self._startup_items())} items will be calculated at startup.") + [self.item_queue.put(i) for i in self._startup_items()] self.startup_finished = True else: self.logger.info(f"Plugin is suspended. 
No items will be calculated.") @@ -453,8 +472,8 @@ def execute_static_items(self) -> None: self.logger.debug("execute_static_item called") if not self.suspended: - self.logger.info(f"{len(self._static_items)} items will be calculated.") - [self.item_queue.put(i) for i in self._static_items] + self.logger.info(f"{len(self._static_items())} items will be calculated.") + [self.item_queue.put(i) for i in self._static_items()] else: self.logger.info(f"Plugin is suspended. No items will be calculated.") @@ -466,8 +485,8 @@ def execute_info_items(self) -> None: self.logger.debug("execute_info_items called") if not self.suspended: - self.logger.info(f"{len(self._static_items)} items will be calculated.") - [self.item_queue.put(i) for i in self._static_items] + self.logger.info(f"{len(self._info_items())} items will be calculated.") + [self.item_queue.put(i) for i in self._info_items()] else: self.logger.info(f"Plugin is suspended. No items will be calculated.") @@ -477,35 +496,32 @@ def execute_all_items(self) -> None: """ if not self.suspended: - self.logger.info(f"Values for all {len(self._ondemand_items)} items with 'db_addon_fct' attribute, which are not 'on-change', will be calculated!") - [self.item_queue.put(i) for i in self._ondemand_items] + self.logger.info(f"Values for all {len(self._ondemand_items())} items with 'db_addon_fct' attribute, which are not 'on-change', will be calculated!") + [self.item_queue.put(i) for i in self._ondemand_items()] else: self.logger.info(f"Plugin is suspended. No items will be calculated.") def work_item_queue(self) -> None: """ Handles item queue were all to be executed items were be placed in. 
- """ - self.logger.info(f"work_item_queue called.") - while self.alive: try: queue_entry = self.item_queue.get(True, 10) - self.logger.info(f"{queue_entry} received.") + self.logger.info(f" Queue Entry: '{queue_entry}' received.") except queue.Empty: - self._active_queue_item = '-' + self.active_queue_item = '-' pass else: if isinstance(queue_entry, tuple): item, value = queue_entry - self.logger.info(f"# {self.item_queue.qsize() + 1} item(s) to do. || 'on-change' item {item.id()} with {value=} will be processed.") - self._active_queue_item = str(item.id()) + self.logger.info(f"# {self.item_queue.qsize() + 1} item(s) to do. || 'on-change' item '{item.path()}' with {value=} will be processed.") + self.active_queue_item = str(item.path()) self.handle_onchange(item, value) else: - self.logger.info(f"# {self.item_queue.qsize() + 1} item(s) to do. || 'on-demand' item {queue_entry.id()} will be processed.") - self._active_queue_item = str(queue_entry.id()) + self.logger.info(f"# {self.item_queue.qsize() + 1} item(s) to do. 
|| 'on-demand' item '{queue_entry.path()}' will be processed.") + self.active_queue_item = str(queue_entry.path()) self.handle_ondemand(queue_entry) def handle_ondemand(self, item: Item) -> None: @@ -517,135 +533,174 @@ def handle_ondemand(self, item: Item) -> None: # set/get parameters item_config = self.get_item_config(item) - _db_addon_fct = item_config['attribute'] - _database_item = item_config.get('database_item') - _ignore_value = item_config.get('ignore_value') - _result = None + db_addon = item_config['db_addon'] + db_addon_fct = item_config['db_addon_fct'] + database_item = item_config['database_item'] + ignore_value = item_config.get('ignore_value') + result = None + self.logger.debug(f"handle_ondemand: Item={item.path()} with {item_config=}") # handle info functions - if _db_addon_fct.startswith('info_'): + if db_addon == 'info': # handle info_db_version - if _db_addon_fct == 'info_db_version': - _result = self._get_db_version() + if db_addon_fct == 'info_db_version': + result = self._get_db_version() + self.logger.debug(f"handle_ondemand: info_db_version {result=}") + else: + self.logger.warning(f"No handling for attribute {db_addon_fct=} for Item {item.path()} defined.") # handle general functions - elif _db_addon_fct.startswith('general_'): + elif db_addon_fct in ALL_GEN_ATTRIBUTES: # handle oldest_value - if _db_addon_fct == 'general_oldest_value': - _result = self._get_oldest_value(_database_item) + if db_addon_fct == 'general_oldest_value': + result = self._get_oldest_value(database_item) # handle oldest_log - elif _db_addon_fct == 'general_oldest_log': - _result = self._get_oldest_log(_database_item) + elif db_addon_fct == 'general_oldest_log': + result = self._get_oldest_log(database_item) - # handle item starting with 'verbrauch_' - elif _db_addon_fct.startswith('verbrauch_'): + else: + self.logger.warning(f"No handling for attribute {db_addon_fct=} for Item {item.path()} defined.") + # handle item starting with 'verbrauch_' + elif db_addon_fct 
in ALL_VERBRAUCH_ATTRIBUTES: if self.execute_debug: self.logger.debug(f"handle_ondemand: 'verbrauch' detected.") - _result = self._handle_verbrauch(_database_item, _db_addon_fct) + result = self._handle_verbrauch(database_item, db_addon_fct, ignore_value) - if _result and _result < 0: - self.logger.warning(f"Result of item {item.id()} with {_db_addon_fct=} was negative. Something seems to be wrong.") + if result and result < 0: + self.logger.warning(f"Result of item {item.path()} with {db_addon_fct=} was negative. Something seems to be wrong.") # handle item starting with 'zaehlerstand_' of format 'zaehlerstand_timeframe_timedelta' like 'zaehlerstand_woche_minus1' - elif _db_addon_fct.startswith('zaehlerstand_'): - + elif db_addon_fct in ALL_ZAEHLERSTAND_ATTRIBUTES: if self.execute_debug: self.logger.debug(f"handle_ondemand: 'zaehlerstand' detected.") - _result = self._handle_zaehlerstand(_database_item, _db_addon_fct) + result = self._handle_zaehlerstand(database_item, db_addon_fct, ignore_value) # handle item starting with 'minmax_' - elif _db_addon_fct.startswith('minmax_'): - + elif db_addon_fct in ALL_HISTORIE_ATTRIBUTES: if self.execute_debug: self.logger.debug(f"handle_ondemand: 'minmax' detected.") - _result = self._handle_min_max(_database_item, _db_addon_fct, _ignore_value) + result = self._handle_min_max(database_item, db_addon_fct, ignore_value)[0][1] + + # handle item starting with 'tagesmitteltemperatur_' + elif db_addon_fct in ALL_TAGESMITTEL_ATTRIBUTES: + if self.execute_debug: + self.logger.debug(f"handle_ondemand: 'tagesmitteltemperatur' detected.") + + result = self._handle_tagesmitteltemperatur(database_item, db_addon_fct, ignore_value)[0][1] # handle item starting with 'serie_' - elif _db_addon_fct.startswith('serie_'): - _db_addon_params = STD_REQUEST_DICT[_db_addon_fct] - _db_addon_params['item'] = _database_item + elif db_addon_fct in ALL_SERIE_ATTRIBUTES: + if 'minmax' in db_addon_fct: + if self.execute_debug: + 
self.logger.debug(f"handle_ondemand: 'serie_minmax' detected.") - if self.execute_debug: - self.logger.debug(f"handle_ondemand: 'serie' detected with {_db_addon_params=}") + result = self._handle_min_max(database_item, db_addon_fct, ignore_value) - _result = self._handle_serie(_db_addon_params) + elif 'verbrauch' in db_addon_fct: + if self.execute_debug: + self.logger.debug(f"handle_ondemand: 'serie_verbrauch' detected.") - # handle kaeltesumme - elif _db_addon_fct == 'kaeltesumme': - _db_addon_params = item_config['params'] - _db_addon_params['_database_item'] = item_config['database_item'] + result = self._handle_verbrauch(database_item, db_addon_fct, ignore_value) + elif 'zaehlerstand' in db_addon_fct: + if self.execute_debug: + self.logger.debug(f"handle_ondemand: 'serie_zaehlerstand' detected.") + + result = self._handle_zaehlerstand(database_item, db_addon_fct, ignore_value) + + elif 'tagesmitteltemperatur' in db_addon_fct: + if self.execute_debug: + self.logger.debug(f"handle_ondemand: 'serie_tagesmittelwert' detected.") + + result = self._handle_tagesmitteltemperatur(database_item, db_addon_fct, ignore_value) + else: + self.logger.warning(f"No handling for attribute {db_addon_fct=} for Item {item.path()} defined.") + + # handle kaeltesumme + elif db_addon_fct == 'kaeltesumme': + db_addon_params = item_config.get('params') if self.execute_debug: - self.logger.debug(f"handle_ondemand: {_db_addon_fct=} detected; {_db_addon_params=}") + self.logger.debug(f"handle_ondemand: {db_addon_fct=} detected; {db_addon_params=}") - _result = self._handle_kaeltesumme(**_db_addon_params) + if db_addon_params: + db_addon_params.update({'database_item': item_config['database_item']}) + result = self._handle_kaeltesumme(**db_addon_params) # handle waermesumme - elif _db_addon_fct == 'waermesumme': - _db_addon_params = item_config['params'] - _db_addon_params['_database_item'] = item_config['database_item'] - + elif db_addon_fct == 'waermesumme': + db_addon_params = 
item_config.get('params') if self.execute_debug: - self.logger.debug(f"handle_ondemand: {_db_addon_fct=} detected; {_db_addon_params=}") + self.logger.debug(f"handle_ondemand: {db_addon_fct=} detected; {db_addon_params=}") - _result = self._handle_waermesumme(**_db_addon_params) + if db_addon_params: + db_addon_params.update({'database_item': item_config['database_item']}) + result = self._handle_waermesumme(**db_addon_params) # handle gruenlandtempsumme - elif _db_addon_fct == 'gruenlandtempsumme': - _db_addon_params = item_config['params'] - _db_addon_params['_database_item'] = item_config['database_item'] + elif db_addon_fct == 'gruenlandtempsumme': + db_addon_params = item_config.get('params') + if self.execute_debug: + self.logger.debug(f"handle_ondemand: {db_addon_fct=} detected; {db_addon_params=}") + if db_addon_params: + db_addon_params.update({'database_item': item_config['database_item']}) + result = self._handle_gruenlandtemperatursumme(**db_addon_params) + + # handle wachstumsgradtage + elif db_addon_fct == 'wachstumsgradtage': + db_addon_params = item_config.get('params') if self.execute_debug: - self.logger.debug(f"handle_ondemand: {_db_addon_fct=} detected; {_db_addon_params=}") + self.logger.debug(f"handle_ondemand: {db_addon_fct=} detected; {db_addon_params}") - _result = self._handle_gruenlandtemperatursumme(**_db_addon_params) + if db_addon_params: + db_addon_params.update({'database_item': item_config['database_item']}) + result = self._handle_wachstumsgradtage(**db_addon_params) # handle tagesmitteltemperatur - elif _db_addon_fct == 'tagesmitteltemperatur': - _db_addon_params = item_config['params'] - _db_addon_params['_database_item'] = item_config['database_item'] - + elif db_addon_fct == 'tagesmitteltemperatur': + db_addon_params = item_config.get('params') if self.execute_debug: - self.logger.debug(f"handle_ondemand: {_db_addon_fct=} detected; {_db_addon_params=}") + self.logger.debug(f"handle_ondemand: {db_addon_fct=} detected; 
{db_addon_params=}") - _result = self._handle_tagesmitteltemperatur(**_db_addon_params) + if db_addon_params: + result = self._handle_tagesmitteltemperatur(database_item, db_addon_fct, ignore_value, db_addon_params) # handle db_request - elif _db_addon_fct == 'db_request': - _db_addon_params = item_config['params'] - _db_addon_params['îtem'] = item_config['database_item'] - + elif db_addon_fct == 'db_request': + db_addon_params = item_config.get('params') if self.execute_debug: - self.logger.debug(f"handle_ondemand: {_db_addon_fct=} detected with {_db_addon_params=}") + self.logger.debug(f"handle_ondemand: {db_addon_fct=} detected with {db_addon_params=}") - if _db_addon_params.keys() & {'func', 'item', 'timeframe'}: - _result = self._query_item(**_db_addon_params) - else: - self.logger.error(f"Attribute 'db_addon_params' not containing needed params for Item {item.id} with {_db_addon_fct=}.") + if db_addon_params: + db_addon_params.update({'database_item': item_config['database_item']}) + if db_addon_params.keys() & {'func', 'item', 'timeframe'}: + result = self._query_item(**db_addon_params) + else: + self.logger.error(f"Attribute 'db_addon_params' not containing needed params for Item {item.id} with {db_addon_fct=}.") # handle everything else else: - self.logger.warning(f"handle_ondemand: Function '{_db_addon_fct}' for item {item.id()} not defined or found.") + self.logger.warning(f"handle_ondemand: Function '{db_addon_fct}' for item {item.path()} not defined or found.") return # log result if self.execute_debug: - self.logger.debug(f"handle_ondemand: result is {_result} for item '{item.id()}' with '{_db_addon_fct=}'") + self.logger.debug(f"handle_ondemand: result is {result} for item '{item.path()}' with '{db_addon_fct=}'") - if _result is None: + if result is None: self.logger.info(f" Result was None; No item value will be set.") return # set item value and put data into plugin_item_dict - self.logger.info(f" Item value for '{item.id()}' will be set to 
{_result}") + self.logger.info(f" Item value for '{item.path()}' will be set to {result}") item_config = self.get_item_config(item) - item_config.update({'value': _result}) - item(_result, self.get_shortname()) + item_config.update({'value': result}) + item(result, self.get_shortname()) def handle_onchange(self, updated_item: Item, value: float) -> None: """ @@ -656,7 +711,7 @@ def handle_onchange(self, updated_item: Item, value: float) -> None: """ if self.onchange_debug: - self.logger.debug(f"handle_onchange called with updated_item={updated_item.id()} and value={value}.") + self.logger.debug(f"handle_onchange called with updated_item={updated_item.path()} and value={value}.") relevant_item_list = self.get_item_list('database_item', updated_item) if self.onchange_debug: @@ -665,18 +720,20 @@ def handle_onchange(self, updated_item: Item, value: float) -> None: for item in relevant_item_list: item_config = self.get_item_config(item) _database_item = item_config['database_item'] - _db_addon_fct = item_config['attribute'] - _var = _db_addon_fct.split('_') + _db_addon_fct = item_config['db_addon_fct'] _ignore_value = item_config['ignore_value'] + _var = _db_addon_fct.split('_') # handle minmax on-change items like minmax_heute_max, minmax_heute_min, minmax_woche_max, minmax_woche_min..... if _db_addon_fct.startswith('minmax') and len(_var) == 3 and _var[2] in ['min', 'max']: _timeframe = convert_timeframe(_var[1]) _func = _var[2] _cache_dict = self.current_values[_timeframe] + if not _timeframe: + return if self.onchange_debug: - self.logger.debug(f"handle_onchange: 'minmax' item {updated_item.id()} with {_func=} detected. Check for update of _cache_dicts and item value.") + self.logger.debug(f"handle_onchange: 'minmax' item {updated_item.path()} with {_func=} detected. 
Check for update of _cache_dicts and item value.") _initial_value = False _new_value = None @@ -684,11 +741,12 @@ def handle_onchange(self, updated_item: Item, value: float) -> None: # make sure, that database item is in cache dict if _database_item not in _cache_dict: _cache_dict[_database_item] = {} - if _cache_dict[_database_item].get(_func, None) is None: - _cached_value = self._query_item(func=_func, item=_database_item, timeframe=_timeframe, start=0, end=0, ignore_value=_ignore_value)[0][1] + if _cache_dict[_database_item].get(_func) is None: + _query_params = {'func': _func, 'item': _database_item, 'timeframe': _timeframe, 'start': 0, 'end': 0, 'ignore_value': _ignore_value} + _cached_value = self._query_item(**_query_params)[0][1] _initial_value = True if self.onchange_debug: - self.logger.debug(f"handle_onchange: Item={updated_item.id()} with _func={_func} and _timeframe={_timeframe} not in cache dict. recent value={_cached_value}.") + self.logger.debug(f"handle_onchange: Item={updated_item.path()} with _func={_func} and _timeframe={_timeframe} not in cache dict. recent value={_cached_value}.") else: _cached_value = _cache_dict[_database_item][_func] @@ -715,159 +773,215 @@ def handle_onchange(self, updated_item: Item, value: float) -> None: if _new_value: _cache_dict[_database_item][_func] = _new_value - self.logger.info(f"Item value for '{item.id()}' with func={_func} will be set to {_new_value}") + self.logger.info(f"Item value for '{item.path()}' with func={_func} will be set to {_new_value}") item_config = self.get_item_config(item) item_config.update({'value': _new_value}) item(_new_value, self.get_shortname()) else: - self.logger.info(f"Received value={value} is not influencing min / max value. Therefore item {item.id()} will not be changed.") + self.logger.info(f"Received value={value} is not influencing min / max value. 
Therefore item {item.path()} will not be changed.") # handle verbrauch on-change items ending with heute, woche, monat, jahr elif _db_addon_fct.startswith('verbrauch') and len(_var) == 2 and _var[1] in ['heute', 'woche', 'monat', 'jahr']: _timeframe = convert_timeframe(_var[1]) _cache_dict = self.previous_values[_timeframe] + if _timeframe is None: + return # make sure, that database item is in cache dict if _database_item not in _cache_dict: - _cached_value = self._query_item(func='max', item=_database_item, timeframe=_timeframe, start=1, end=1, ignore_value=_ignore_value)[0][1] + _query_params = {'func': 'max', 'item': _database_item, 'timeframe': _timeframe, 'start': 1, 'end': 1, 'ignore_value': _ignore_value} + _cached_value = self._query_item(**_query_params)[0][1] _cache_dict[_database_item] = _cached_value if self.onchange_debug: - self.logger.debug(f"handle_onchange: Item={updated_item.id()} with {_timeframe=} not in cache dict. Value {_cached_value} has been added.") + self.logger.debug(f"handle_onchange: Item={updated_item.path()} with {_timeframe=} not in cache dict. Value {_cached_value} has been added.") else: _cached_value = _cache_dict[_database_item] # calculate value, set item value, put data into plugin_item_dict if _cached_value is not None: _new_value = round(value - _cached_value, 1) - self.logger.info(f"Item value for '{item.id()}' will be set to {_new_value}") + self.logger.info(f"Item value for '{item.path()}' will be set to {_new_value}") item_config = self.get_item_config(item) item_config.update({'value': _new_value}) item(_new_value, self.get_shortname()) else: self.logger.info(f"Value for end of last {_timeframe} not available. 
No item value will be set.") + def _update_database_items(self): + for item in self._database_item_path_items(): + item_config = self.get_item_config(item) + database_item_path = item_config.get('database_item') + database_item = self.items.return_item(database_item_path) + + if database_item is None: + self.logger.warning(f"Database-Item for Item with config item path for Database-Item {database_item_path!r} not found. Item '{item.path()}' will be removed from plugin.") + self.remove_item(item) + else: + item_config.update({'database_item': database_item}) + @property def log_level(self): return self.logger.getEffectiveLevel() - @property def queue_backlog(self): return self.item_queue.qsize() - @property - def active_queue_item(self): - return self._active_queue_item - - @property def db_version(self): return self._get_db_version() - @property def _startup_items(self) -> list: return self.get_item_list('startup', True) - @property def _onchange_items(self) -> list: return self.get_item_list('cycle', 'on-change') - @property def _daily_items(self) -> list: return self.get_item_list('cycle', 'daily') - @property def _weekly_items(self) -> list: return self.get_item_list('cycle', 'weekly') - @property def _monthly_items(self) -> list: return self.get_item_list('cycle', 'monthly') - @property def _yearly_items(self) -> list: return self.get_item_list('cycle', 'yearly') - @property def _static_items(self) -> list: return self.get_item_list('cycle', 'static') - @property def _admin_items(self) -> list: return self.get_item_list('db_addon', 'admin') - @property def _info_items(self) -> list: return self.get_item_list('db_addon', 'info') - @property def _database_items(self) -> list: return self.get_item_list('db_addon', 'database') - @property + def _database_item_path_items(self) -> list: + return self.get_item_list('database_item_path', True) + def _ondemand_items(self) -> list: - return self._daily_items + self._weekly_items + self._monthly_items + self._yearly_items 
+ self._static_items + return self._daily_items() + self._weekly_items() + self._monthly_items() + self._yearly_items() + self._static_items() ############################## - # Public functions + # Public functions / Using item_path ############################## - def gruenlandtemperatursumme(self, item: Item, year: Union[int, str]) -> Union[int, None]: + def gruenlandtemperatursumme(self, item_path: str, year: Union[int, str]) -> Union[int, None]: """ - Query database for gruenlandtemperatursumme for given year or year/month + Query database for gruenlandtemperatursumme for given year or year https://de.wikipedia.org/wiki/Gr%C3%BCnlandtemperatursumme - :param item: item object or item_id for which the query should be done - :param year: year the gruenlandtemperatursumme should be calculated for + Beim Grünland wird die Wärmesumme nach Ernst und Loeper benutzt, um den Vegetationsbeginn und somit den Termin von Düngungsmaßnahmen zu bestimmen. + Dabei erfolgt die Aufsummierung der Tagesmitteltemperaturen über 0 °C, wobei der Januar mit 0.5 und der Februar mit 0.75 gewichtet wird. + Bei einer Wärmesumme von 200 Grad ist eine Düngung angesagt. 
+ :param item_path: item object or item_id for which the query should be done + :param year: year the gruenlandtemperatursumme should be calculated for :return: gruenlandtemperatursumme """ - return self._handle_gruenlandtemperatursumme(item, year) + item = self.items.return_item(item_path) + if item: + return self._handle_gruenlandtemperatursumme(item, year) - def waermesumme(self, item: Item, year, month: Union[int, str] = None) -> Union[int, None]: + def waermesumme(self, item_path: str, year, month: Union[int, str] = None, threshold: int = 0) -> Union[int, None]: """ Query database for waermesumme for given year or year/month + https://de.wikipedia.org/wiki/W%C3%A4rmesumme - :param item: item object or item_id for which the query should be done + :param item_path: item object or item_id for which the query should be done :param year: year the waermesumme should be calculated for :param month: month the waermesumme should be calculated for - + :param threshold: threshold for temperature :return: waermesumme """ - return self._handle_waermesumme(item, year, month) + item = self.items.return_item(item_path) + if item: + return self._handle_waermesumme(item, year, month, threshold) - def kaeltesumme(self, item: Item, year, month: Union[int, str] = None) -> Union[int, None]: + def kaeltesumme(self, item_path: str, year, month: Union[int, str] = None) -> Union[int, None]: """ Query database for kaeltesumme for given year or year/month + https://de.wikipedia.org/wiki/K%C3%A4ltesumme - :param item: item object or item_id for which the query should be done + :param item_path: item object or item_id for which the query should be done :param year: year the kaeltesumme should be calculated for :param month: month the kaeltesumme should be calculated for - :return: kaeltesumme """ - return self._handle_kaeltesumme(item, year, month) + item = self.items.return_item(item_path) + if item: + return self._handle_kaeltesumme(item, year, month) - def tagesmitteltemperatur(self, 
item: Item, count: int = None) -> list: + def tagesmitteltemperatur(self, item_path: str, timeframe: str = None, count: int = None) -> list: """ Query database for tagesmitteltemperatur + https://www.dwd.de/DE/leistungen/klimadatendeutschland/beschreibung_tagesmonatswerte.html - :param item: item object or item_id for which the query should be done - :param count: start of timeframe defined by number of time increments starting from now to the left (into the past) - + :param item_path: item object or item_id for which the query should be done + :param timeframe: timeincrement for determination + :param count: number of time increments starting from now to the left (into the past) :return: tagesmitteltemperatur - :rtype: list of tuples """ - return self._handle_tagesmitteltemperatur(_database_item=item, count=count) + if not timeframe: + timeframe = 'day' + + if not count: + count = 0 + + item = self.items.return_item(item_path) + if item: + return self._handle_tagesmitteltemperatur(database_item=item, db_addon_fct='tagesmitteltemperatur', params={'timeframe': timeframe, 'count': count}) + + def wachstumsgradtage(self, item_path: str, year: Union[int, str], threshold: int) -> Union[int, None]: + """ + Query database for wachstumsgradtage + https://de.wikipedia.org/wiki/Wachstumsgradtag + + :param item_path: item object or item_id for which the query should be done + :param year: year the wachstumsgradtage should be calculated for + :param threshold: Temperature in °C as threshold: Ein Tage mit einer Tagesdurchschnittstemperatur oberhalb des Schellenwertes gilt als Wachstumsgradtag + :return: wachstumsgradtage + """ + + item = self.items.return_item(item_path) + if item: + return self._handle_wachstumsgradtage(item, year, threshold) + + def temperaturserie(self, item_path: str, year: Union[int, str], method: str) -> Union[list, None]: + """ + Query database for wachstumsgradtage + https://de.wikipedia.org/wiki/Wachstumsgradtag - def fetch_log(self, func: str, item: 
Item, timeframe: str, start: int = None, end: int = 0, count: int = None, group: str = None, group2: str = None, ignore_value=None) -> Union[list, None]: + :param item_path: item object or item_id for which the query should be done + :param year: year the wachstumsgradtage should be calculated for + :param method: Calculation method + :return: wachstumsgradtage + """ + + item = self.items.return_item(item_path) + if item: + return self._handle_temperaturserie(item, year, method) + + def query_item(self, func: str, item_path: str, timeframe: str, start: int = None, end: int = 0, group: str = None, group2: str = None, ignore_value=None) -> list: + item = self.items.return_item(item_path) + if item is None: + return [] + + return self._query_item(func, item, timeframe, start, end, group, group2, ignore_value) + + def fetch_log(self, func: str, item_path: str, timeframe: str, start: int = None, end: int = 0, count: int = None, group: str = None, group2: str = None, ignore_value=None) -> list: """ Query database, format response and return it :param func: function to be used at query - :param item: item str or item_id for which the query should be done + :param item_path: item str or item_id for which the query should be done :param timeframe: time increment für definition of start, end, count (day, week, month, year) :param start: start of timeframe (oldest) for query given in x time increments (default = None, meaning complete database) :param end: end of timeframe (newest) for query given in x time increments (default = 0, meaning today, end of last week, end of last month, end of last year) @@ -878,12 +992,15 @@ def fetch_log(self, func: str, item: Item, timeframe: str, start: int = None, en :return: formatted query response """ + item = self.items.return_item(item_path) - if isinstance(item, str): - item = self.items.return_item(item) if count: start, end = count_to_start(count) - return self._query_item(func=func, item=item, timeframe=timeframe, start=start, 
end=end, group=group, group2=group2, ignore_value=ignore_value) + + if item and start and end: + return self._query_item(func=func, item=item, timeframe=timeframe, start=start, end=end, group=group, group2=group2, ignore_value=ignore_value) + else: + return [] def fetch_raw(self, query: str, params: dict = None) -> Union[list, None]: """ @@ -924,346 +1041,812 @@ def suspend(self, state: bool = False) -> bool: # write back value to item, if one exists for item in self.get_item_list('db_addon', 'admin'): item_config = self.get_item_config(item) - if item_config['attribute'] == 'suspend': + if item_config['db_addon_fct'] == 'suspend': item(self.suspended, self.get_shortname()) return self.suspended ############################## - # Support stuff + # Support stuff / Using Item Object ############################## - def _handle_min_max(self, _database_item: Item, _db_addon_fct: str, _ignore_value): + def _handle_min_max(self, database_item: Item, db_addon_fct: str, ignore_value=None) -> Union[list, None]: """ Handle execution of min/max calculation - """ - - _var = _db_addon_fct.split('_') - _result = None - _timeframes = ['heute', 'woche', 'monat', 'jahr'] - # handle all on_change functions of format 'minmax_timeframe_function' like 'minmax_heute_max' - if len(_var) == 3 and _var[1] in _timeframes and _var[2] in ['min', 'max']: + if db_addon_fct in ALL_ONCHANGE_ATTRIBUTES: if self.execute_debug: - self.logger.debug(f"on-change function={_var[0]} with {_var[1]} detected; will be calculated by next change of database item") + self.logger.debug(f"on-change function with 'min/max' detected; will be calculated by next change of database item") + return - # handle all 'last' functions in format 'minmax_last_window_function' like 'minmax_last_24h_max' - elif len(_var) == 4 and _var[1] == 'last' and _var[3] in ['min', 'max', 'avg']: - _window = _var[2] - _func = _var[3] - _timeframe = convert_timeframe(_window[-1:]) - _timedelta = int(_window[:-1]) + _var = 
db_addon_fct.split('_') + group = None + group2 = None - if self.execute_debug: - self.logger.debug(f"_handle_min_max: 'last' function detected. {_window=}, {_func=}") - - if _timeframe in ['day', 'week', 'month', 'year']: - _result = self._query_item(func=_func, item=_database_item, timeframe=_timeframe, start=_timedelta, end=0, ignore_value=_ignore_value)[0][1] + # handle all 'last' functions in format 'minmax_last_window_function' like 'minmax_last_24h_max' + if len(_var) == 4 and _var[1] == 'last': + func = _var[3] + timeframe = convert_timeframe(_var[2][-1:]) + start = to_int(_var[2][:-1]) + end = 0 + log_text = 'minmax_last' + if timeframe is None or start is None: + return # handle all functions 'min/max/avg' in format 'minmax_timeframe_timedelta_func' like 'minmax_heute_minus2_max' - elif len(_var) == 4 and _var[1] in _timeframes and _var[2].startswith('minus') and _var[3] in ['min', 'max', 'avg']: - _timeframe = convert_timeframe(_var[1]) # day, week, month, year - _timedelta = _var[2][-1] # 1, 2, 3, ... - _func = _var[3] # min, max, avg + elif len(_var) == 4 and _var[2].startswith('minus'): + func = _var[3] # min, max, avg + timeframe = convert_timeframe(_var[1]) # day, week, month, year + start = to_int(_var[2][-1]) # 1, 2, 3, ... 
+ end = start + log_text = 'minmax' + if timeframe is None or start is None: + return - if self.execute_debug: - self.logger.debug(f"_handle_min_max: _db_addon_fct={_func} detected; {_timeframe=}, {_timedelta=}") + # handle all functions 'serie_min/max/avg' in format 'serie_minmax_timeframe_func_count_group' like 'serie_minmax_monat_min_15m' + elif _var[0] == 'serie' and _var[1] == 'minmax': + timeframe = convert_timeframe(_var[2]) + func = _var[3] + start = to_int(_var[4][:-1]) + end = 0 + group = convert_timeframe(_var[4][len(_var[4]) - 1]) + log_text = 'serie_min/max/avg' + if timeframe is None or start is None or group is None: + return + else: + self.logger.info(f"_handle_min_max: No adequate function for {db_addon_fct=} found.") + return + + if func not in ALLOWED_MINMAX_FUNCS: + self.logger.info(f"_handle_min_max: Called {func=} not in allowed functions={ALLOWED_MINMAX_FUNCS}.") + return - if isinstance(_timedelta, str) and _timedelta.isdigit(): - _timedelta = int(_timedelta) + query_params = {'item': database_item, 'ignore_value': ignore_value, 'func': func, 'timeframe': timeframe, 'start': start, 'end': end, 'group': group, 'group2': group2} - if isinstance(_timedelta, int): - _result = self._query_item(func=_func, item=_database_item, timeframe=_timeframe, start=_timedelta, end=_timedelta, ignore_value=_ignore_value)[0][1] + if self.execute_debug: + self.logger.debug(f"_handle_min_max: db_addon_fct={log_text} function detected. 
{query_params=}") - return _result + return self._query_item(**query_params) - def _handle_zaehlerstand(self, _database_item: Item, _db_addon_fct: str): + def _handle_zaehlerstand(self, database_item: Item, db_addon_fct: str, ignore_value=None) -> Union[list, None]: """ Handle execution of Zaehlerstand calculation - """ + # handle all on_change functions + if db_addon_fct in ALL_ONCHANGE_ATTRIBUTES: + if self.execute_debug: + self.logger.debug(f"on-change function with 'zaehlerstand' detected; will be calculated by next change of database item") + return - _var = _db_addon_fct.split('_') # zaehlerstand_heute_minus1 - _result = None - _func = _var[0] - _timeframe = convert_timeframe(_var[1]) - _timedelta = _var[2][-1] + _var = db_addon_fct.split('_') + group = None + group2 = None + + # handle functions starting with 'zaehlerstand' like 'zaehlerstand_heute_minus1' + if len(_var) == 3 and _var[1] == 'zaehlerstand': + func = 'max' + timeframe = convert_timeframe(_var[1]) + start = to_int(_var[2][-1]) + end = start + log_text = 'zaehlerstand' + if timeframe is None or start is None: + return - if self.execute_debug: - self.logger.debug(f"_handle_zaehlerstand: {_func} function detected. 
{_timeframe=}, {_timedelta=}") + # handle all functions 'serie_min/max/avg' in format 'serie_minmax_timeframe_func_count_group' like 'serie_zaehlerstand_tag_30d' + elif _var[0] == 'serie' and _var[1] == 'zaehlerstand': + func = 'max' + timeframe = convert_timeframe(_var[2]) + start = to_int(_var[3][:-1]) + end = 0 + group = convert_timeframe(_var[3][len(_var[3]) - 1]) + log_text = 'serie_min/max/avg' + if timeframe is None or start is None or group is None: + return + else: + self.logger.info(f"_handle_zaehlerstand: No adequate function for {db_addon_fct=} found.") + return - if isinstance(_timedelta, str) and _timedelta.isdigit(): - _timedelta = int(_timedelta) + query_params = {'item': database_item, 'ignore_value': ignore_value, 'func': func, 'timeframe': timeframe, 'start': start, 'end': end, 'group': group, 'group2': group2} - if _func == 'zaehlerstand': - _result = self._query_item(func='max', item=_database_item, timeframe=_timeframe, start=_timedelta, end=_timedelta)[0][1] + if self.execute_debug: + self.logger.debug(f"_handle_zaehlerstand: db_addon_fct={log_text} function detected. 
{query_params=}") - return _result + return self._query_item(**query_params) - def _handle_verbrauch(self, _database_item: Item, _db_addon_fct: str): + def _handle_verbrauch(self, database_item: Item, db_addon_fct: str, ignore_value=None): """ Handle execution of verbrauch calculation - """ - _var = _db_addon_fct.split('_') - _result = None + self.logger.debug(f"_handle_verbrauch called with {database_item=} and {db_addon_fct=}") + + def consumption_calc(c_start, c_end) -> Union[float, None]: + """ + Handle query for Verbrauch + + :param c_start: beginning of timeframe + :param c_end: end of timeframe + """ + + if self.prepare_debug: + self.logger.debug(f"_consumption_calc called with {database_item=}, {timeframe=}, {c_start=}, {c_end=}") + + _result = None + _query_params = {'item': database_item, 'timeframe': timeframe} + + # get value for end and check it; + _query_params.update({'func': 'max', 'start': c_end, 'end': c_end}) + value_end = self._query_item(**_query_params)[0][1] + + if self.prepare_debug: + self.logger.debug(f"_consumption_calc {value_end=}") + + if value_end is None: # if None (Error) return + return + elif value_end == 0: # wenn die Query "None" ergab, was wiederum bedeutet, dass zum Abfragezeitpunkt keine Daten vorhanden sind, ist der value hier gleich 0 → damit der Verbrauch für die Abfrage auch Null + return 0 + + # get value for start and check it; + _query_params.update({'func': 'min', 'start': c_end, 'end': c_end}) + value_start = self._query_item(**_query_params)[0][1] + if self.prepare_debug: + self.logger.debug(f"_consumption_calc {value_start=}") + + if value_start is None: # if None (Error) return + return + + if value_start == 0: # wenn der Wert zum Startzeitpunkt 0 ist, gab es dort keinen Eintrag (also keinen Verbrauch), dann frage den nächsten Eintrag in der DB ab. + self.logger.info(f"No DB Entry found for requested start date. 
Looking for next DB entry.") + _query_params.update({'func': 'next', 'start': c_start, 'end': c_end}) + value_start = self._query_item(**_query_params)[0][1] + if self.prepare_debug: + self.logger.debug(f"_consumption_calc: next available value is {value_start=}") + + # calculate result + if value_start is not None: + return round(value_end - value_start, 1) # handle all on_change functions of format 'verbrauch_timeframe' like 'verbrauch_heute' - if len(_var) == 2 and _var[1] in ['heute', 'woche', 'monat', 'jahr']: + if db_addon_fct in ALL_ONCHANGE_ATTRIBUTES: if self.execute_debug: - self.logger.debug(f"on_change function={_var[1]} detected; will be calculated by next change of database item") + self.logger.debug(f"on_change function with 'verbrauch' detected; will be calculated by next change of database item") + return + + _var = db_addon_fct.split('_') # handle all functions 'verbrauch' in format 'verbrauch_timeframe_timedelta' like 'verbrauch_heute_minus2' - elif len(_var) == 3 and _var[1] in ['heute', 'woche', 'monat', 'jahr'] and _var[2].startswith('minus'): - _timeframe = convert_timeframe(_var[1]) - _timedelta = _var[2][-1] + if len(_var) == 3 and _var[1] in ['heute', 'woche', 'monat', 'jahr'] and _var[2].startswith('minus'): + timeframe = convert_timeframe(_var[1]) + timedelta = to_int(_var[2][-1]) + if timedelta is None or timeframe is None: + return if self.execute_debug: - self.logger.debug(f"_handle_verbrauch: '{_db_addon_fct}' function detected. {_timeframe=}, {_timedelta=}") + self.logger.debug(f"_handle_verbrauch: '{db_addon_fct}' function detected. 
{timeframe=}, {timedelta=}") - if isinstance(_timedelta, str) and _timedelta.isdigit(): - _timedelta = int(_timedelta) - - if isinstance(_timedelta, int): - _result = self._consumption_calc(_database_item, _timeframe, start=_timedelta + 1, end=_timedelta) + return consumption_calc(c_start=timedelta + 1, c_end=timedelta) # handle all functions of format 'verbrauch_function_window_timeframe_timedelta' like 'verbrauch_rolling_12m_woche_minus1' elif len(_var) == 5 and _var[1] == 'rolling' and _var[4].startswith('minus'): - _func = _var[1] - _window = _var[2] # 12m - _window_inc = int(_window[:-1]) # 12 - _window_dur = convert_timeframe(_window[-1]) # day, week, month, year - _timeframe = convert_timeframe(_var[3]) # day, week, month, year - _timedelta = _var[4][-1] # 1 + func = _var[1] + window = _var[2] # 12m + window_inc = to_int(window[:-1]) # 12 + window_dur = convert_timeframe(window[-1]) # day, week, month, year + timeframe = convert_timeframe(_var[3]) # day, week, month, year + timedelta = to_int(_var[4][-1]) # 1 + endtime = timedelta + + if window_inc is None or window_dur is None or timeframe is None or timedelta is None: + return if self.execute_debug: - self.logger.debug(f"_handle_verbrauch: '{_func}' function detected. {_window=}, {_timeframe=}, {_timedelta=}") + self.logger.debug(f"_handle_verbrauch: '{func}' function detected. 
{window=}, {timeframe=}, {timedelta=}") - if isinstance(_timedelta, str) and _timedelta.isdigit(): - _timedelta = int(_timedelta) - _endtime = _timedelta - - if _func == 'rolling' and _window_dur in ['day', 'week', 'month', 'year']: - _starttime = convert_duration(_timeframe, _window_dur) * _window_inc - _result = self._consumption_calc(_database_item, _timeframe, _starttime, _endtime) + if window_dur in ['day', 'week', 'month', 'year']: + starttime = convert_duration(timeframe, window_dur) * window_inc + return consumption_calc(c_start=starttime, c_end=endtime) # handle all functions of format 'verbrauch_timeframe_timedelta' like 'verbrauch_jahreszeitraum_minus1' elif len(_var) == 3 and _var[1] == 'jahreszeitraum' and _var[2].startswith('minus'): - _timeframe = convert_timeframe(_var[1]) # day, week, month, year - _timedelta = _var[2][-1] # 1 oder 2 oder 3 + timeframe = convert_timeframe(_var[1]) # day, week, month, year + timedelta = to_int(_var[2][-1]) # 1 oder 2 oder 3 + if timedelta is None or timeframe is None: + return if self.execute_debug: - self.logger.debug(f"_handle_verbrauch: '{_db_addon_fct}' function detected. {_timeframe=}, {_timedelta=}") + self.logger.debug(f"_handle_verbrauch: '{db_addon_fct}' function detected. {timeframe=}, {timedelta=}") + + today = datetime.date.today() + year = today.year - timedelta + start_date = datetime.date(year, 1, 1) - relativedelta(days=1) # Start ist Tag vor dem 1.1., damit Abfrage den Maximalwert von 31.12. 00:00:00 bis 1.1. 
00:00:00 ergibt + end_date = today - relativedelta(years=timedelta) + start = (today - start_date).days + end = (today - end_date).days + + return consumption_calc(c_start=start, c_end=end) + + # handle all functions of format 'serie_verbrauch_timeframe_countgroup' like 'serie_verbrauch_tag_30d' + elif db_addon_fct.startswith('serie_') and len(_var) == 4: + self.logger.debug(f"_handle_verbrauch serie reached") + func = 'diff_max' + timeframe = convert_timeframe(_var[2]) + start = to_int(_var[3][:-1]) + group = convert_timeframe(_var[3][len(_var[3]) - 1]) + group2 = None + if timeframe is None or start is None or group is None: + self.logger.warning(f"For calculating '{db_addon_fct}' not all mandatory parameters given. {timeframe=}, {start=}, {group=}") + return - if isinstance(_timedelta, str) and _timedelta.isdigit(): - _timedelta = int(_timedelta) + query_params = {'func': func, 'item': database_item, 'timeframe': timeframe, 'start': start, 'end': 0, 'group': group, 'group2': group2, 'ignore_value': ignore_value} - if isinstance(_timedelta, int): - _today = datetime.date.today() - _year = _today.year - _timedelta - _start_date = datetime.date(_year, 1, 1) - relativedelta(days=1) # Start ist Tag vor dem 1.1., damit Abfrage den Maximalwert von 31.12. 00:00:00 bis 1.1. 00:00:00 ergibt - _end_date = _today - relativedelta(years=_timedelta) - _start = (_today - _start_date).days - _end = (_today - _end_date).days + if self.execute_debug: + self.logger.debug(f"_handle_verbrauch: 'serie_verbrauch_timeframe_countgroup' function detected. 
{query_params=}") - _result = self._consumption_calc(_database_item, _timeframe, _start, _end) + return self._query_item(**query_params) - return _result + else: + self.logger.info(f"_handle_verbrauch: No adequate function for {db_addon_fct=} found.") + return - def _handle_serie(self, _db_addon_params: dict): + def _handle_tagesmitteltemperatur(self, database_item: Item, db_addon_fct: str, ignore_value=None, params: dict = None) -> list: """ - Handle execution of serie calculation + Query database for tagesmitteltemperatur + :param database_item: item object or item_id for which the query should be done + :param db_addon_fct + :param ignore_value + :param params: + :return: tagesmitteltemperatur """ - return self._query_item(**_db_addon_params) - def _handle_kaeltesumme(self, _database_item: Item, year: Union[int, str], month: Union[int, str] = None) -> Union[int, None]: + # handle all on_change functions + if db_addon_fct in ALL_ONCHANGE_ATTRIBUTES: + if self.execute_debug: + self.logger.debug(f"on_change function with 'tagesmitteltemperatur' detected; will be calculated by next change of database item") + return [] + + _var = db_addon_fct.split('_') + group = None + group2 = None + + # handle tagesmitteltemperatur + if db_addon_fct == 'tagesmitteltemperatur': + if not params: + return [] + + func = 'max' + timeframe = convert_timeframe(params.get('timeframe')) + log_text = 'tagesmitteltemperatur' + count = to_int(params.get('count')) + if timeframe is None or not count: + return [] + + start, end = count_to_start(count) + + # handle 'tagesmittelwert_timeframe_timedelta' like 'tagesmittelwert_heute_minus1' + elif len(_var) == 3 and _var[2].startswith('minus'): + func = 'max' + timeframe = convert_timeframe(_var[1]) + start = to_int(_var[2][-1]) + end = start + log_text = 'tagesmittelwert_timeframe_timedelta' + if timeframe is None or start is None: + return [] + + # handle 'serie_tagesmittelwert_countgroup' like 'serie_tagesmittelwert_0d' + elif 
db_addon_fct.startswith('serie_') and len(_var) == 3: + # 'serie_tagesmittelwert_0d': {'func': 'max', 'timeframe': 'year', 'start': 0, 'end': 0, 'group': 'day'}, + func = 'max' + timeframe = 'year' + log_text = 'serie_tagesmittelwert_countgroup' + start = to_int(_var[2][:-1]) + end = 0 + group = convert_timeframe(_var[2][len(_var[2]) - 1]) + if group is None or start is None: + return [] + + # handle 'serie_tagesmittelwert_group2_count_group' like 'serie_tagesmittelwert_stunde_0d' + elif db_addon_fct.startswith('serie_') and len(_var) == 4: + # 'serie_tagesmittelwert_stunde_0d': {'func': 'avg1', 'timeframe': 'day', 'start': 0, 'end': 0, 'group': 'hour', 'group2': 'day'}, + # 'serie_tagesmittelwert_stunde_30d': {'func': 'avg1', 'timeframe': 'day', 'start': 30, 'end': 0, 'group': 'hour', 'group2': 'day'}, + func = 'avg1' + timeframe = 'day' + log_text = 'serie_tagesmittelwert_group2_countgroup' + start = to_int(_var[3][:-1]) + end = 0 + group = 'hour' + group2 = convert_timeframe(_var[3][len(_var[3]) - 1]) + if group2 is None or start is None: + return [] + + # handle 'serie_tagesmittelwert_group2_start_endgroup' like 'serie_tagesmittelwert_stunde_30_0d' + elif db_addon_fct.startswith('serie_') and len(_var) == 5: + timeframe = 'day' + method = 'raw' + start = to_int(_var[3]) + end = to_int(_var[4][:-1]) + if start is None or end is None: + return [] + + return self._prepare_temperature_list(database_item=database_item, timeframe=timeframe, start=start, end=end, method=method) + + # handle everything else + else: + self.logger.info(f"_handle_tagesmitteltemperatur: No adequate function for {db_addon_fct=} found.") + return [] + + query_params = {'item': database_item, 'ignore_value': ignore_value, 'func': func, 'timeframe': timeframe, 'start': start, 'end': end, 'group': group, 'group2': group2} + + if self.execute_debug: + self.logger.debug(f"_handle_tagesmitteltemperatur: db_addon_fct={log_text} function detected. 
{query_params=}") + + return self._query_item(**query_params) + + def _handle_kaeltesumme(self, database_item: Item, year: Union[int, str], month: Union[int, str] = None) -> Union[int, None]: """ Query database for kaeltesumme for given year or year/month + https://de.wikipedia.org/wiki/K%C3%A4ltesumme - :param _database_item: item object or item_id for which the query should be done + :param database_item: item object or item_id for which the query should be done :param year: year the kaeltesumme should be calculated for :param month: month the kaeltesumme should be calculated for :return: kaeltesumme """ + self.logger.debug(f"_handle_kaeltesumme called with {database_item=}, {year=}, {month=}") + # check validity of given year if not valid_year(year): - self.logger.error(f"kaeltesumme: Year for item={_database_item.id()} was {year}. This is not a valid year. Query cancelled.") + self.logger.error(f"_handle_kaeltesumme: Year for item={database_item.path()} was {year}. This is not a valid year. Query cancelled.") return + # define year if year == 'current': if datetime.date.today() < datetime.date(int(datetime.date.today().year), 9, 21): year = datetime.date.today().year - 1 else: year = datetime.date.today().year + # define start_date and end_date if month is None: start_date = datetime.date(int(year), 9, 21) end_date = datetime.date(int(year) + 1, 3, 22) - group2 = 'year' elif valid_month(month): start_date = datetime.date(int(year), int(month), 1) end_date = start_date + relativedelta(months=+1) - datetime.timedelta(days=1) - group2 = 'month' else: - self.logger.error(f"kaeltesumme: Month for item={_database_item.id()} was {month}. This is not a valid month. Query cancelled.") + self.logger.error(f"_handle_kaeltesumme: Month for item={database_item.path()} was {month}. This is not a valid month. 
Query cancelled.") return + # define start / end today = datetime.date.today() if start_date > today: - self.logger.error(f"kaeltesumme: Start time for query of item={_database_item.id()} is in future. Query cancelled.") + self.logger.error(f"_handle_kaeltesumme: Start time for query of item={database_item.path()} is in future. Query cancelled.") return start = (today - start_date).days end = (today - end_date).days if end_date < today else 0 if start < end: - self.logger.error(f"kaeltesumme: End time for query of item={_database_item.id()} is before start time. Query cancelled.") + self.logger.error(f"_handle_kaeltesumme: End time for query of item={database_item.path()} is before start time. Query cancelled.") return - _db_addon_params = STD_REQUEST_DICT.get('kaltesumme_year_month', None) - _db_addon_params.update({'start': start, 'end': end, 'group2': group2, 'item': _database_item}) - - # query db and generate values - _result = self._query_item(**_db_addon_params) - self.logger.debug(f"kaeltesumme: {_result=} for {_database_item.id()=} with {year=} and {month=}") + # get raw data as list + self.logger.debug("_handle_kaeltesumme: Try to get raw data") + raw_data = self._prepare_temperature_list(database_item=database_item, timeframe='day', start=start, end=end, method='raw') + if self.execute_debug: + self.logger.debug(f"_handle_kaeltesumme: raw_value_list={raw_data=}") # calculate value - value = 0 - if _result == [[None, None]]: + if raw_data is None: return - try: - if month: - value = _result[0][1] - else: - for entry in _result: - entry_value = entry[1] - if entry_value: - value += entry_value - return int(value) - except Exception as e: - self.logger.error(f"Error {e} occurred during calculation of kaeltesumme with {_result=} for {_database_item.id()=} with {year=} and {month=}") + elif isinstance(raw_data, list): + # akkumulieren alle negativen Werte + ks = 0 + for entry in raw_data: + if entry[1] < 0: + ks -= entry[1] + return int(round(ks, 0)) - def 
_handle_waermesumme(self, _database_item: Item, year: Union[int, str], month: Union[int, str] = None) -> Union[int, None]: + def _handle_waermesumme(self, database_item: Item, year: Union[int, str], month: Union[int, str] = None, threshold: int = 0) -> Union[int, None]: """ Query database for waermesumme for given year or year/month + https://de.wikipedia.org/wiki/W%C3%A4rmesumme - :param _database_item: item object or item_id for which the query should be done - :param year: year the waermesumme should be calculated for + :param database_item: item object or item_id for which the query should be done + :param year: year the waermesumme should be calculated for; "current" for current year :param month: month the waermesumme should be calculated for :return: waermesumme """ + # start: links / älterer Termin end: rechts / jüngerer Termin + + # check validity of given year if not valid_year(year): - self.logger.error(f"waermesumme: Year for item={_database_item.id()} was {year}. This is not a valid year. Query cancelled.") + self.logger.error(f"_handle_waermesumme: Year for item={database_item.path()} was {year}. This is not a valid year. Query cancelled.") return + # define year if year == 'current': year = datetime.date.today().year + # define start_date, end_date if month is None: - start_date = datetime.date(int(year), 3, 20) + start_date = datetime.date(int(year), 1, 1) end_date = datetime.date(int(year), 9, 21) - group2 = 'year' elif valid_month(month): start_date = datetime.date(int(year), int(month), 1) end_date = start_date + relativedelta(months=+1) - datetime.timedelta(days=1) - group2 = 'month' else: - self.logger.error(f"waermesumme: Month for item={_database_item.id()} was {month}. This is not a valid month. Query cancelled.") + self.logger.error(f"_handle_waermesumme: Month for item={database_item.path()} was {month}. This is not a valid month. 
Query cancelled.") return + # check start_date today = datetime.date.today() if start_date > today: - self.logger.info(f"waermesumme: Start time for query of item={_database_item.id()} is in future. Query cancelled.") + self.logger.info(f"_handle_waermesumme: Start time for query of item={database_item.path()} is in future. Query cancelled.") return + # define start / end start = (today - start_date).days end = (today - end_date).days if end_date < today else 0 + + # check end if start < end: - self.logger.error(f"waermesumme: End time for query of item={_database_item.id()} is before start time. Query cancelled.") + self.logger.error(f"_handle_waermesumme: End time for query of item={database_item.path()} is before start time. Query cancelled.") return - _db_addon_params = STD_REQUEST_DICT.get('waermesumme_year_month', None) - _db_addon_params.update({'start': start, 'end': end, 'group2': group2, 'item': _database_item}) + # get raw data as list + raw_data = self._prepare_temperature_list(database_item=database_item, timeframe='day', start=start, end=end, method='raw') + if self.execute_debug: + self.logger.debug(f"_handle_waermesumme: raw_value_list={raw_data=}") - # query db and generate values - _result = self._query_item(**_db_addon_params)[0][1] - self.logger.debug(f"waermesumme_year_month: {_result=} for {_database_item.id()=} with {year=} and {month=}") + # set threshold to min 0 + threshold = max(0, threshold) # calculate value - if _result == [[None, None]]: + if raw_data is None: return + elif isinstance(raw_data, list): + # akkumulieren alle Werte, größer/gleich Schwellenwert + ws = 0 + for entry in raw_data: + if entry[1] >= threshold: + ws += entry[1] + return int(round(ws, 0)) - if _result is not None: - return int(_result) - else: - return - - def _handle_gruenlandtemperatursumme(self, _database_item: Item, year: Union[int, str]) -> Union[int, None]: + def _handle_gruenlandtemperatursumme(self, database_item: Item, year: Union[int, str]) -> 
Union[int, None]: """ Query database for gruenlandtemperatursumme for given year or year/month + https://de.wikipedia.org/wiki/Gr%C3%BCnlandtemperatursumme - :param _database_item: item object or item_id for which the query should be done + :param database_item: item object for which the query should be done :param year: year the gruenlandtemperatursumme should be calculated for :return: gruenlandtemperatursumme """ if not valid_year(year): - self.logger.error(f"gruenlandtemperatursumme: Year for item={_database_item.id()} was {year}. This is not a valid year. Query cancelled.") + self.logger.error(f"_handle_gruenlandtemperatursumme: Year for item={database_item.path()} was {year}. This is not a valid year. Query cancelled.") + return + + # define year + if year == 'current': + year = datetime.date.today().year + + # define start_date, end_date + start_date = datetime.date(int(year), 1, 1) + end_date = datetime.date(int(year), 9, 21) + + # check start_date + today = datetime.date.today() + if start_date > today: + self.logger.info(f"_handle_gruenlandtemperatursumme: Start time for query of item={database_item.path()} is in future. Query cancelled.") + return + + # define start / end + start = (today - start_date).days + end = (today - end_date).days if end_date < today else 0 + + # check end + if start < end: + self.logger.error(f"_handle_gruenlandtemperatursumme: End time for query of item={database_item.path()} is before start time. 
Query cancelled.") + return + + # get raw data as list + raw_data = self._prepare_temperature_list(database_item=database_item, timeframe='day', start=start, end=end, method='raw') + if self.execute_debug: + self.logger.debug(f"_handle_gruenlandtemperatursumme: raw_value_list={raw_data}") + + # calculate value + if raw_data is None: return + elif isinstance(raw_data, list): + # akkumulieren alle positiven Tagesmitteltemperaturen, im Januar gewichtet mit 50%, im Februar mit 75% + gts = 0 + for entry in raw_data: + timestamp, value = entry + if value > 0: + dt = datetime.datetime.fromtimestamp(timestamp / 1000) + if dt.month == 1: + value = value * 0.5 + elif dt.month == 2: + value = value * 0.75 + gts += value + return int(round(gts, 0)) - current_year = datetime.date.today().year + def _handle_wachstumsgradtage(self, database_item: Item, year: Union[int, str], method: int = 0, threshold: int = 10): + """ + Calculate "wachstumsgradtage" for given year with temperature thershold + https://de.wikipedia.org/wiki/Wachstumsgradtag + :param database_item: item object or item_id for which the query should be done + :param year: year the wachstumsgradtage should be calculated for + :param method: calculation method to be used + :param threshold: temperature in °C as threshold for evaluation + :return: wachstumsgradtage + """ + + if not valid_year(year): + self.logger.error(f"_handle_wachstumsgradtage: Year for item={database_item.path()} was {year}. This is not a valid year. Query cancelled.") + return + + # define year if year == 'current': - year = current_year + year = datetime.date.today().year + + # define start_date, end_date + start_date = datetime.date(int(year), 1, 1) + end_date = datetime.date(int(year), 9, 21) - year = int(year) - year_delta = current_year - year - if year_delta < 0: - self.logger.error(f"gruenlandtemperatursumme: Start time for query of item={_database_item.id()} is in future. 
Query cancelled.") + # check start_date + today = datetime.date.today() + if start_date > today: + self.logger.info(f"_handle_wachstumsgradtage: Start time for query of item={database_item.path()} is in future. Query cancelled.") return - _db_addon_params = STD_REQUEST_DICT.get('gts', None) - _db_addon_params.update({'start': year_delta, 'end': year_delta, 'item': _database_item}) + # define start / end + start = (today - start_date).days + end = (today - end_date).days if end_date < today else 0 - # query db and generate values - _result = self._query_item(**_db_addon_params) + # check end + if start < end: + self.logger.error(f"_handle_wachstumsgradtage: End time for query of item={database_item.path()} is before start time. Query cancelled.") + return - # calculate value and return it - if _result == [[None, None]]: + # get raw data as list + raw_data = self._prepare_temperature_list(database_item=database_item, timeframe='day', start=start, end=end, method='minmax') + if self.execute_debug: + self.logger.debug(f"_handle_wachstumsgradtage: raw_value_list={raw_data}") + + # calculate value + if raw_data is None: return - try: - gts = 0 - for entry in _result: - dt = datetime.datetime.fromtimestamp(int(entry[0]) / 1000) - if dt.month == 1: - gts += float(entry[1]) * 0.5 - elif dt.month == 2: - gts += float(entry[1]) * 0.75 + elif isinstance(raw_data, list): + # Die Berechnung des einfachen Durchschnitts // akkumuliere positive Differenz aus Mittelwert aus Tagesminimaltemperatur und Tagesmaximaltemperatur limitiert auf 30°C und Schwellenwert + wgte = 0 + wgte_list = [] + if method == 0 or method == 10: + self.logger.info(f"Caluclate 'Wachstumsgradtag' according to 'Berechnung des einfachen Durchschnitts'.") + for entry in raw_data: + timestamp, min_val, max_val = entry + wgt = (((min_val + min(30, max_val)) / 2) - threshold) + if wgt > 0: + wgte += wgt + wgte_list.append([timestamp, int(round(wgte, 0))]) + if method == 0: + return int(round(wgte, 0)) else: - gts += 
entry[1] - return int(round(gts, 0)) - except Exception as e: - self.logger.error(f"Error {e} occurred during calculation of gruenlandtemperatursumme with {_result=} for {_database_item.id()=}") + return wgte_list + + # Die modifizierte Berechnung des einfachen Durchschnitts. // akkumuliere positive Differenz aus Mittelwert aus Tagesminimaltemperatur mit mind Schwellentemperatur und Tagesmaximaltemperatur limitiert auf 30°C und Schwellenwert + elif method == 1 or method == 11: + self.logger.info(f"Caluclate 'Wachstumsgradtag' according to 'Modifizierte Berechnung des einfachen Durchschnitts'.") + for entry in raw_data: + timestamp, min_val, max_val = entry + wgt = (((max(threshold, min_val) + min(30.0, max_val)) / 2) - threshold) + if wgt > 0: + wgte += wgt + wgte_list.append([timestamp, int(round(wgte, 0))]) + if method == 1: + return int(round(wgte, 0)) + else: + return wgte_list + + # Zähle Tage, bei denen die Tagesmitteltemperatur oberhalb des Schwellenwertes lag + elif method == 2 or method == 12: + self.logger.info(f"Caluclate 'Wachstumsgradtag' according to 'Anzahl der Tage, bei denen die Tagesmitteltemperatur oberhalb des Schwellenwertes lag'.") + for entry in raw_data: + timestamp, min_val, max_val = entry + wgt = (((min_val + min(30, max_val)) / 2) - threshold) + if wgt > 0: + wgte += 1 + wgte_list.append([timestamp, wgte]) + if method == 0: + return wgte + else: + return wgte_list + + else: + self.logger.info(f"Method for 'Wachstumsgradtag' calculation not defined.'") - def _handle_tagesmitteltemperatur(self, _database_item: Item, count: int = None) -> list: + def _handle_temperaturserie(self, database_item: Item, year: Union[int, str], method: str = 'raw'): """ - Query database for tagesmitteltemperatur + provide list of lists having timestamp and temperature(s) per day - :param _database_item: item object or item_id for which the query should be done - :param count: start of timeframe defined by number of time increments starting from now to the left 
(into the past) - :return: tagesmitteltemperatur + :param database_item: item object or item_id for which the query should be done + :param year: year the wachstumsgradtage should be calculated for + :param method: calculation method to be used + :return: list of temperatures """ - start, end = count_to_start(count) - _db_addon_params = STD_REQUEST_DICT.get('tagesmittelwert_hour_days', None) - _db_addon_params.update({'item': _database_item, 'start': start, 'end': end}) + if not valid_year(year): + self.logger.error(f"_handle_temepraturserie: Year for item={database_item.path()} was {year}. This is not a valid year. Query cancelled.") + return + + # define year + if year == 'current': + year = datetime.date.today().year + + # define start_date, end_date + start_date = datetime.date(int(year), 1, 1) + end_date = datetime.date(int(year), 12, 31) - return self._query_item(**_db_addon_params)[0][1] + # check start_date + today = datetime.date.today() + if start_date > today: + self.logger.info(f"_handle_temepraturserie: Start time for query of item={database_item.path()} is in future. Query cancelled.") + return + + # define start / end + start = (today - start_date).days + end = (today - end_date).days if end_date < today else 0 + + # check end + if start < end: + self.logger.error(f"_handle_temepraturserie: End time for query of item={database_item.path()} is before start time. Query cancelled.") + return + + # check method + if method not in ['hour', 'raw', 'minmax']: + self.logger.error(f"_handle_temepraturserie: Calculation method {method!r} unknown. Need to be 'hour', 'raw' or 'minmax'. 
Query cancelled.") + return + + # get raw data as list + temp_list = self._prepare_temperature_list(database_item=database_item, timeframe='day', start=start, end=end, method=method) + if self.execute_debug: + self.logger.debug(f"_handle_temepraturserie: {temp_list=}") + + return temp_list + + def _prepare_temperature_list(self, database_item: Item, timeframe: str, start: int, end: int = 0, ignore_value=None, method: str = 'hour') -> Union[list, None]: + """ + returns list of lists having timestamp and temperature(s) per day + + :param database_item: item object or item_id for which the query should be done + :param timeframe: timeframe for query + :param start: increments for timeframe from now to start + :param end: increments for timeframe from now to end + :param ignore_value: value to be ignored during query + :param method: Calculation method + :return: list of temperatures + """ + + def _create_temp_dict() -> dict: + """create dict based on database query result like {'date1': {'hour1': [temp values], 'hour2': [temp values], ...}, 'date2': {'hour1': [temp values], 'hour2': [temp values], ...}, ...}""" + + _temp_dict = {} + for _entry in raw_data: + dt = datetime.datetime.utcfromtimestamp(_entry[0] / 1000) + date = dt.strftime('%Y-%m-%d') + hour = dt.strftime('%H') + if date not in _temp_dict: + _temp_dict[date] = {} + if hour not in _temp_dict[date]: + _temp_dict[date][hour] = [] + _temp_dict[date][hour].append(_entry[1]) + return _temp_dict + + def _calculate_hourly_average(): + """ calculate hourly average based on list of temperatures and update temp_dict""" + + for _date in temp_dict: + for hour in temp_dict[_date]: + hour_raw_value_list = temp_dict[_date][hour] + # hour_value = round(sum(hour_raw_value_list) / len(hour_raw_value_list), 1) # Durchschnittsbildung über alle Werte der Liste + hour_value = hour_raw_value_list[0] # Nehme den ersten Wert der Liste als Stundenwert (kommt am nächsten an die Definition, den Wert exakt zur vollen Stunden zu 
nehmen) + temp_dict[_date][hour] = [hour_value] + + def _create_list_timestamp_avgtemp() -> list: + """Create list of list with [[timestamp1, value1], [timestamp2, value2], ...] based on temp_dict""" + + _temp_list = [] + for _date in temp_dict: + + # wenn mehr als 20 Stundenwerte vorliegen, berechne den Tagesdurchschnitt über alle Werte + if len(temp_dict[_date]) >= 20: + _values = sum(list(temp_dict[_date].values()), []) + _values_avg = round(sum(_values) / len(_values), 1) + + # wenn für 00, 06, 12 und 18 Uhr Werte vorliegen, berechne den Tagesdurchschnitt über diese Werte + elif '00' in temp_dict[_date] and '06' in temp_dict[_date] and '12' in temp_dict[_date] and '18' in temp_dict[_date]: + _values_avg = round((temp_dict[_date]['00'][0] + temp_dict[_date]['06'][0] + temp_dict[_date]['12'][0] + temp_dict[_date]['18'][0]) / 4, 1) + + # sonst berechne den Tagesdurchschnitt über alle Werte + else: + _values = sum(list(temp_dict[_date].values()), []) + _values_avg = round(sum(_values) / len(_values), 1) + + _timestamp = datetime_to_timestamp(datetime.datetime.strptime(_date, '%Y-%m-%d')) + _temp_list.append([_timestamp, _values_avg]) + return _temp_list + + def _create_list_timestamp_minmaxtemp() -> list: + """Create list of list with [[timestamp1, min value1, max_value1], [timestamp2, min value2, max_value2], ...] based on temp_dict""" + + _temp_list = [] + for _date in temp_dict: + _timestamp = datetime_to_timestamp(datetime.datetime.strptime(_date, '%Y-%m-%d')) + _day_values = sum(list(temp_dict[_date].values()), []) + _temp_list.append([_timestamp, min(_day_values), max(_day_values)]) + return _temp_list + + # temp_list = [[timestamp1, avg-value1], [timestamp2, avg-value2], [timestamp3, avg-value3], ...] 
Tagesmitteltemperatur pro Stunde wird in der Datenbank per avg ermittelt + if method == 'hour': + raw_data = self._query_item(func='avg', item=database_item, timeframe=timeframe, start=start, end=end, group='hour', ignore_value=ignore_value) + self.logger.debug(f"{raw_data=}") + + if raw_data and isinstance(raw_data, list): + if raw_data == [[None, None]]: + return + + # create nested dict with temps + temp_dict = _create_temp_dict() + + # create list of list like database query response + temp_list = _create_list_timestamp_avgtemp() + self.logger.debug(f"{temp_list=}") + return temp_list + + # temp_list = [[timestamp1, avg-value1], [timestamp2, avg-value2], [timestamp3, avg-value3], ...] Tagesmitteltemperatur pro Stunde wird hier im Plugin ermittelt ermittelt + elif method == 'raw': + raw_data = self._query_item(func='raw', item=database_item, timeframe=timeframe, start=start, end=end, ignore_value=ignore_value) + self.logger.debug(f"{raw_data=}") + + if raw_data and isinstance(raw_data, list): + if raw_data == [[None, None]]: + return + + # create nested dict with temps + temp_dict = _create_temp_dict() + self.logger.debug(f"raw: {temp_dict=}") + + # calculate 'tagesdurchschnitt' and create list of list like database query response + _calculate_hourly_average() + self.logger.debug(f"raw: {temp_dict=}") + + # create list of list like database query response + temp_list = _create_list_timestamp_avgtemp() + self.logger.debug(f"{temp_list=}") + return temp_list + + # temp_list = [[timestamp1, min-value1, max-value1], [timestamp2, min-value2, max-value2], [timestamp3, min-value3, max-value3], ...] 
+ elif method == 'minmax': + raw_data = self._query_item(func='raw', item=database_item, timeframe=timeframe, start=start, end=end, ignore_value=ignore_value) + self.logger.debug(f"{raw_data=}") + + if raw_data and isinstance(raw_data, list): + if raw_data == [[None, None]]: + return + + # create nested dict with temps + temp_dict = _create_temp_dict() + self.logger.debug(f"raw: {temp_dict=}") + + # create list of list like database query response + temp_list = _create_list_timestamp_minmaxtemp() + self.logger.debug(f"{temp_list=}") + return temp_list def _create_due_items(self) -> list: """ @@ -1273,32 +1856,27 @@ def _create_due_items(self) -> list: """ + # täglich zu berechnende Items zur Action Liste hinzufügen _todo_items = set() - _todo_items.update(set(self._daily_items)) + _todo_items.update(set(self._daily_items())) self.current_values[DAY] = {} self.previous_values[DAY] = {} - # wenn jetzt Wochentag = Montag ist, werden auch die wöchentlichen Items berechnet + # wenn Wochentag == Montag, werden auch die wöchentlichen Items berechnet if self.shtime.now().hour == 0 and self.shtime.now().minute == 0 and self.shtime.weekday(self.shtime.today()) == 1: - _todo_items.update(set(self._weekly_items)) - # self.wochenwert_dict = {} - # self.vorwochenendwert_dict = {} + _todo_items.update(set(self._weekly_items())) self.current_values[WEEK] = {} self.previous_values[WEEK] = {} - # wenn jetzt der erste Tage eines Monates ist, werden auch die monatlichen Items berechnet + # wenn der erste Tage eines Monates ist, werden auch die monatlichen Items berechnet if self.shtime.now().hour == 0 and self.shtime.now().minute == 0 and self.shtime.now().day == 1: - _todo_items.update(set(self._monthly_items)) - # self.monatswert_dict = {} - # self.vormonatsendwert_dict = {} + _todo_items.update(set(self._monthly_items())) self.current_values[MONTH] = {} self.previous_values[MONTH] = {} - # wenn jetzt der erste Tage des ersten Monates eines Jahres ist, werden auch die jährlichen 
Items berechnet + # wenn der erste Tage des ersten Monates eines Jahres ist, werden auch die jährlichen Items berechnet if self.shtime.now().hour == 0 and self.shtime.now().minute == 0 and self.shtime.now().day == 1 and self.shtime.now().month == 1: - _todo_items.update(set(self._yearly_items)) - # self.jahreswert_dict = {} - # self.vorjahresendwert_dict = {} + _todo_items.update(set(self._yearly_items())) self.current_values[YEAR] = {} self.previous_values[YEAR] = {} @@ -1418,7 +1996,7 @@ def _get_oldest_log(self, item: Item) -> int: self.item_cache[item]['oldest_log'] = _oldest_log if self.prepare_debug: - self.logger.debug(f"_get_oldest_log for item {item.id()} = {_oldest_log}") + self.logger.debug(f"_get_oldest_log for item {item.path()} = {_oldest_log}") return _oldest_log @@ -1450,10 +2028,10 @@ def _get_oldest_value(self, item: Item) -> Union[int, float, bool]: validity = True elif i == 10: validity = True - self.logger.error(f"oldest_value for item {item.id()} could not be read; value is set to -999999999") + self.logger.error(f"oldest_value for item {item.path()} could not be read; value is set to -999999999") if self.prepare_debug: - self.logger.debug(f"_get_oldest_value for item {item.id()} = {_oldest_value}") + self.logger.debug(f"_get_oldest_value for item {item.path()} = {_oldest_value}") return _oldest_value @@ -1465,19 +2043,20 @@ def _get_itemid(self, item: Item) -> int: :return: id of the item within the database """ - # self.logger.debug(f"_get_itemid called with item={item.id()}") + # self.logger.debug(f"_get_itemid called with item={item.path()}") _item_id = self.item_cache.get(item, {}).get('id', None) + if _item_id is None: - row = self._read_item_table(item) - if row: - if len(row) > 0: - _item_id = int(row[0]) - if item not in self.item_cache: - self.item_cache[item] = {} - self.item_cache[item]['id'] = _item_id + row = self._read_item_table(item_path=str(item.path())) + if row and len(row) > 0: + _item_id = int(row[0]) + if item not in 
self.item_cache: + self.item_cache[item] = {} + self.item_cache[item]['id'] = _item_id + return _item_id - def _get_itemid_for_query(self, item: Item) -> Union[int, None]: + def _get_itemid_for_query(self, item: Union[Item, str, int]) -> Union[int, None]: """ Get DB item id for query @@ -1495,89 +2074,7 @@ def _get_itemid_for_query(self, item: Item) -> Union[int, None]: item_id = None return item_id - def _handle_query_result(self, query_result: Union[list, None]) -> list: - """ - Handle query result containing list - - :param query_result: list of query result with [[value, value], [value, value] for regular result, [[None, None]] for errors, [[0,0]] for 'no values for requested timeframe' - - """ - - # if query delivers None, abort - if query_result is None: - # if query delivers None, abort - self.logger.error(f"Error occurred during _query_item. Aborting...") - _result = [[None, None]] - elif len(query_result) == 0: - _result = [[0, 0]] - self.logger.info(f" No values for item in requested timeframe in database found.") - else: - _result = [] - for element in query_result: - timestamp = element[0] - value = element[1] - if timestamp and value is not None: - _result.append([timestamp, round(value, 1)]) - if not _result: - _result = [[None, None]] - - # if self.prepare_debug: - # self.logger.debug(f"_handle_query_result: {_result=}") - - return _result - - def _consumption_calc(self, item, timeframe: str, start: int, end: int) -> Union[float, None]: - """ - Handle query for Verbrauch - - :param item: item, the query should be done for - :param timeframe: timeframe as week, month, year - :param start: beginning of timeframe - :param start: end of timeframe - - """ - - if self.prepare_debug: - self.logger.debug(f"_consumption_calc called with {item=},{timeframe=},{start=},{end=}") - - _result = None - - # get value for end and check it; - value_end = self._query_item(func='max', item=item, timeframe=timeframe, start=end, end=end)[0][1] - if self.prepare_debug: - 
self.logger.debug(f"_consumption_calc {value_end=}") - - if value_end is None: # if None (Error) return - return - elif value_end == 0: # wenn die Query "None" ergab, was wiederum bedeutet, dass zum Abfragezeitpunkt keine Daten vorhanden sind, ist der value hier gleich 0 → damit der Verbrauch für die Abfrage auch Null - _result = 0 - else: - # get value for start and check it; - # value_start = self._query_item(func='max', item=item, timeframe=timeframe, start=start, end=start)[0][1] - value_start = self._query_item(func='min', item=item, timeframe=timeframe, start=end, end=end)[0][1] - if self.prepare_debug: - self.logger.debug(f"_consumption_calc {value_start=}") - - if value_start is None: # if None (Error) return - return - - # ToDo: Prüfen, unter welchen Bedingungen value_start == 0 bzw. wie man den nächsten Eintrag nutzt. - if value_start == 0: # wenn der Wert zum Startzeitpunkt 0 ist, gab es dort keinen Eintrag (also keinen Verbrauch), dann frage den nächsten Eintrag in der DB ab. - self.logger.info(f"No DB Entry found for requested start date. 
Looking for next DB entry.") - # value_start = self._handle_query_result(self._query_log_next(item=item, timeframe=timeframe, timedelta=start))[0][1] - value_start = self._handle_query_result(self._query_item(func='next', item=item, timeframe=timeframe, start=start))[0][1] - if self.prepare_debug: - self.logger.debug(f"_consumption_calc: next available value is {value_start=}") - - if value_end is not None and value_start is not None: - _result = round(value_end - value_start, 1) - - if self.prepare_debug: - self.logger.debug(f"_consumption_calc: {_result=} for {item=},{timeframe=},{start=},{end=}") - - return _result - - def _query_item(self, func: str, item, timeframe: str, start: int = None, end: int = 0, group: str = None, group2: str = None, ignore_value=None) -> list: + def _query_item(self, func: str, item: Item, timeframe: str, start: int = None, end: int = 0, group: str = None, group2: str = None, ignore_value=None) -> list: """ Do diverse checks of input, and prepare query of log by getting item_id, start / end in timestamp etc. @@ -1593,29 +2090,62 @@ def _query_item(self, func: str, item, timeframe: str, start: int = None, end: i :return: query response / list for value pairs [[None, None]] for errors, [[0,0]] for """ + def _handle_query_result(query_result) -> list: + """ + Handle query result containing list + """ + + # if query delivers None, abort + if query_result is None: + # if query delivers None, abort + self.logger.error(f"Error occurred during _query_item. 
Aborting...") + _result = [[None, None]] + elif len(query_result) == 0: + _result = [[0, 0]] + self.logger.info(f" No values for item in requested timeframe in database found.") + else: + _result = [] + for element in query_result: + timestamp = element[0] + value = element[1] + if timestamp and value is not None: + _result.append([timestamp, round(value, 1)]) + if not _result: + _result = [[None, None]] + + return _result + if self.prepare_debug: - self.logger.debug(f"_query_item called with {func=}, item={item.id()}, {timeframe=}, {start=}, {end=}, {group=}, {group2=}, {ignore_value=}") + self.logger.debug(f"_query_item called with {func=}, item={item.path()}, {timeframe=}, {start=}, {end=}, {group=}, {group2=}, {ignore_value=}") - # SET DEFAULT RESULT + # set default result result = [[None, None]] - # CHECK CORRECTNESS OF TIMEFRAME - if timeframe not in ["year", "month", "week", "day"]: - self.logger.error(f"_query_item: Requested {timeframe=} for item={item.id()} not defined; Need to be year, month, week, day'. Query cancelled.") + # check correctness of timeframe + if timeframe not in ALLOWED_QUERY_TIMEFRAMES: + self.logger.error(f"_query_item: Requested {timeframe=} for item={item.path()} not defined; Need to be 'year' or 'month' or 'week' or 'day' or 'hour''. Query cancelled.") return result - # CHECK CORRECTNESS OF START / END + # check start / end for being int + if isinstance(start, str) and start.isdigit(): + start = int(start) + if isinstance(end, str) and end.isdigit(): + end = int(end) + if not isinstance(start, int) and not isinstance(end, int): + return result + + # check correctness of start / end if start < end: - self.logger.warning(f"_query_item: Requested {start=} for item={item.id()} is not valid since {start=} < {end=}. Query cancelled.") + self.logger.warning(f"_query_item: Requested {start=} for item={item.path()} is not valid since {start=} < {end=}. 
Query cancelled.") return result - # DEFINE ITEM_ID - item_id = self._get_itemid_for_query(item) + # define item_id + item_id = self._get_itemid(item) if not item_id: - self.logger.error(f"_query_item: ItemId for item={item.id()} not found. Query cancelled.") + self.logger.error(f"_query_item: ItemId for item={item.path()} not found. Query cancelled.") return result - # DEFINE START AND END OF QUERY AS TIMESTAMP IN MICROSECONDS + # define start and end of query as timestamp in microseconds ts_start, ts_end = get_start_end_as_timestamp(timeframe, start, end) oldest_log = int(self._get_oldest_log(item)) @@ -1625,24 +2155,24 @@ def _query_item(self, func: str, item, timeframe: str, start: int = None, end: i if self.prepare_debug: self.logger.debug(f"_query_item: Requested {timeframe=} with {start=} and {end=} resulted in start being timestamp={ts_start} / {timestamp_to_timestring(ts_start)} and end being timestamp={ts_end} / {timestamp_to_timestring(ts_end)}") - # CHECK IF VALUES FOR END TIME AND START TIME ARE IN DATABASE + # check if values for end time and start time are in database if ts_end < oldest_log: # (Abfrage abbrechen, wenn Endzeitpunkt in UNIX-timestamp der Abfrage kleiner (und damit jünger) ist, als der UNIX-timestamp des ältesten Eintrages) - self.logger.info(f"_query_item: Requested end time timestamp={ts_end} / {timestamp_to_timestring(ts_end)} of query for Item='{item.id()}' is prior to oldest entry with timestamp={oldest_log} / {timestamp_to_timestring(oldest_log)}. Query cancelled.") + self.logger.info(f"_query_item: Requested end time timestamp={ts_end} / {timestamp_to_timestring(ts_end)} of query for Item='{item.path()}' is prior to oldest entry with timestamp={oldest_log} / {timestamp_to_timestring(oldest_log)}. 
Query cancelled.") return result if ts_start < oldest_log: if not self.use_oldest_entry: - self.logger.info(f"_query_item: Requested start time timestamp={ts_start} / {timestamp_to_timestring(ts_start)} of query for Item='{item.id()}' is prior to oldest entry with timestamp={oldest_log} / {timestamp_to_timestring(oldest_log)}. Query cancelled.") + self.logger.info(f"_query_item: Requested start time timestamp={ts_start} / {timestamp_to_timestring(ts_start)} of query for Item='{item.path()}' is prior to oldest entry with timestamp={oldest_log} / {timestamp_to_timestring(oldest_log)}. Query cancelled.") return result else: - self.logger.info(f"_query_item: Requested start time timestamp={ts_start} / {timestamp_to_timestring(ts_start)} of query for Item='{item.id()}' is prior to oldest entry with timestamp={oldest_log} / {timestamp_to_timestring(oldest_log)}. Oldest available entry will be used.") + self.logger.info(f"_query_item: Requested start time timestamp={ts_start} / {timestamp_to_timestring(ts_start)} of query for Item='{item.path()}' is prior to oldest entry with timestamp={oldest_log} / {timestamp_to_timestring(oldest_log)}. 
Oldest available entry will be used.") ts_start = oldest_log - log = self._query_log_timestamp(func=func, item_id=item_id, ts_start=ts_start, ts_end=ts_end, group=group, group2=group2, ignore_value=ignore_value) - result = self._handle_query_result(log) + query_params = {'func': func, 'item_id': item_id, 'ts_start': ts_start, 'ts_end': ts_end, 'group': group, 'group2': group2, 'ignore_value': ignore_value} + result = _handle_query_result(self._query_log_timestamp(**query_params)) if self.prepare_debug: - self.logger.debug(f"_query_item: value for item={item.id()} with {timeframe=}, {func=}: {result}") + self.logger.debug(f"_query_item: value for item={item.path()} with {timeframe=}, {func=}: {result}") return result @@ -1661,7 +2191,7 @@ def _init_cache_dicts(self) -> None: MONTH: {}, YEAR: {} } - + self.previous_values = { DAY: {}, WEEK: {}, @@ -1706,7 +2236,7 @@ def _work_item_queue_thread_shutdown(self): self.work_item_queue_thread = None ############################## - # DB Query Preparation + # Database Query Preparation ############################## def _query_log_timestamp(self, func: str, item_id: int, ts_start: int, ts_end: int, group: str = None, group2: str = None, ignore_value=None) -> Union[list, None]: @@ -1725,11 +2255,11 @@ def _query_log_timestamp(self, func: str, item_id: int, ts_start: int, ts_end: i """ - # DO DEBUG LOG + # do debug log if self.prepare_debug: self.logger.debug(f"_query_log_timestamp: Called with {func=}, {item_id=}, {ts_start=}, {ts_end=}, {group=}, {group2=}, {ignore_value=}") - # DEFINE GENERIC QUERY PARTS + # define query parts _select = { 'avg': 'time, ROUND(AVG(val_num * duration) / AVG(duration), 1) as value ', 'avg1': 'time, ROUND(AVG(value), 1) as value FROM (SELECT time, ROUND(AVG(val_num), 1) as value ', @@ -1743,7 +2273,8 @@ def _query_log_timestamp(self, func: str, item_id: int, ts_start: int, ts_end: i 'sum_avg': 'time, ROUND(SUM(value), 1) as value FROM (SELECT time, ROUND(AVG(val_num * duration) / AVG(duration), 
1) as value ', 'sum_min_neg': 'time, ROUND(SUM(value), 1) as value FROM (SELECT time, IF(min(val_num) < 0, ROUND(MIN(val_num), 1), 0) as value ', 'diff_max': 'time, value1 - LAG(value1) OVER (ORDER BY time) AS value FROM (SELECT time, ROUND(MAX(val_num), 1) as value1 ', - 'next': 'time, val_num as value ' + 'next': 'time, val_num as value ', + 'raw': 'time, val_num as value ' } _table_alias = { @@ -1760,6 +2291,7 @@ def _query_log_timestamp(self, func: str, item_id: int, ts_start: int, ts_end: i 'sum_min_neg': ') AS table1 ', 'diff_max': ') AS table1 ', 'next': '', + 'raw': '', } _order = "time DESC LIMIT 1 " if func == "next" else "time ASC " @@ -1768,29 +2300,25 @@ def _query_log_timestamp(self, func: str, item_id: int, ts_start: int, ts_end: i _db_table = 'log ' - # DEFINE mySQL QUERY PARTS _group_by_sql = { - "year": "GROUP BY YEAR(FROM_UNIXTIME(time/1000)) ", - "month": "GROUP BY YEAR(FROM_UNIXTIME(time/1000)), MONTH(FROM_UNIXTIME(time/1000)) ", - "week": "GROUP BY YEARWEEK(FROM_UNIXTIME(time/1000), 5) ", - "day": "GROUP BY DATE(FROM_UNIXTIME(time/1000)) ", - "hour": "GROUP BY DATE(FROM_UNIXTIME(time/1000)), HOUR(FROM_UNIXTIME(time/1000)) ", + "year": "GROUP BY YEAR(FROM_UNIXTIME(time/1000)) ", + "month": "GROUP BY FROM_UNIXTIME((time/1000),'%Y%m') ", + "week": "GROUP BY YEARWEEK(FROM_UNIXTIME(time/1000), 5) ", + "day": "GROUP BY DATE(FROM_UNIXTIME(time/1000)) ", + "hour": "GROUP BY FROM_UNIXTIME((time/1000),'%Y%m%d%H') ", None: '' } - # DEFINE SQLITE QUERY PARTS _group_by_sqlite = { - "year": "GROUP BY strftime('%Y', date((time/1000),'unixepoch')) ", + "year": "GROUP BY strftime('%Y', date((time/1000),'unixepoch')) ", "month": "GROUP BY strftime('%Y%m', date((time/1000),'unixepoch')) ", - "week": "GROUP BY strftime('%Y%W', date((time/1000),'unixepoch')) ", - "day": "GROUP BY date((time/1000),'unixepoch') ", - "hour": "GROUP BY date((time/1000),'unixepoch'), strftime('%H', date((time/1000),'unixepoch')) ", + "week": "GROUP BY strftime('%Y%W', 
date((time/1000),'unixepoch')) ", + "day": "GROUP BY date((time/1000),'unixepoch') ", + "hour": "GROUP BY strftime('%Y%m%d%H', datetime((time/1000),'unixepoch')) ", None: '' } - ###################################### - - # SELECT QUERY PARTS DEPENDING IN DB DRIVER + # select query parts depending in db driver if self.db_driver.lower() == 'pymysql': _group_by = _group_by_sql elif self.db_driver.lower() == 'sqlite3': @@ -1799,71 +2327,58 @@ def _query_log_timestamp(self, func: str, item_id: int, ts_start: int, ts_end: i self.logger.error('DB Driver unknown') return - # CHECK CORRECTNESS OF FUNC + # check correctness of func if func not in _select: self.logger.error(f"_query_log_timestamp: Requested {func=} for {item_id=} not defined. Query cancelled.") return - # CHECK CORRECTNESS OF GROUP AND GROUP2 + # check correctness of group and group2 if group not in _group_by: self.logger.error(f"_query_log_timestamp: Requested {group=} for item={item_id=} not defined. Query cancelled.") return if group2 not in _group_by: - self.logger.error(f"_query_log_timestamp: Requested {group=} for item={item_id=} not defined. Query cancelled.") + self.logger.error(f"_query_log_timestamp: Requested {group2=} for item={item_id=} not defined. 
Query cancelled.") return - # HANDLE IGNORE VALUES + # handle ignore values if func in ['min', 'max', 'max1', 'sum_max', 'sum_avg', 'sum_min_neg', 'diff_max']: # extend _where statement for excluding boolean values == 0 for defined functions _where = f'{_where}AND val_bool = 1 ' if ignore_value: # if value to be ignored are defined, extend _where statement _where = f'{_where}AND val_num != {ignore_value} ' - # SET PARAMS - params = { - 'item_id': item_id, - 'ts_start': ts_start - } - + # set params + params = {'item_id': item_id, 'ts_start': ts_start} if func != "next": - params['ts_end'] = ts_end + params.update({'ts_end': ts_end}) - # ASSEMBLE QUERY + # assemble query query = f"SELECT {_select[func]}FROM {_db_table}WHERE {_where}{_group_by[group]}ORDER BY {_order}{_table_alias[func]}{_group_by[group2]}".strip() if self.db_driver.lower() == 'sqlite3': query = query.replace('IF', 'IIF') - # DO DEBUG LOG + # do debug log if self.prepare_debug: self.logger.debug(f"_query_log_timestamp: {query=}, {params=}") - # REQUEST DATABASE AND RETURN RESULT + # request database and return result return self._fetchall(query, params) - def _read_log_all(self, item): + def _read_log_all(self, item_id: int): """ Read the oldest log record for given item - :param item: Item to read the record for - :type item: item - - :return: Log record for Item + :param item_id: item_id to read the record for + :return: Log record for item_id """ if self.prepare_debug: - self.logger.debug(f"_read_log_all: Called for item={item}") + self.logger.debug(f"_read_log_all: Called for {item_id=}") - # DEFINE ITEM_ID - create item_id from item or string input of item_id and break, if not given - item_id = self._get_itemid_for_query(item) - if not item_id: - self.logger.error(f"_read_log_all: ItemId for item={item.id()} not found. 
Query cancelled.") - return - - if item_id: - query = "SELECT * FROM log WHERE (item_id = :item_id) AND (time = None OR 1 = 1)" - params = {'item_id': item_id} - result = self._fetchall(query, params) - return result + query = "SELECT * FROM log WHERE (item_id = :item_id) AND (time = None OR 1 = 1)" + params = {'item_id': item_id} + result = self._fetchall(query, params) + return result def _read_log_oldest(self, item_id: int, cur=None) -> int: """ @@ -1897,12 +2412,12 @@ def _read_log_timestamp(self, item_id: int, timestamp: int, cur=None) -> Union[l query = "SELECT * FROM log WHERE item_id = :item_id AND time = :timestamp;" return self._fetchall(query, params, cur=cur) - def _read_item_table(self, item): + def _read_item_table(self, item_id: int = None, item_path: str = None): """ Read item table - :param item: name or Item_id of the item within the database - :type item: item + :param item_id: unique ID for item within database + :param item_path: item_path for Item within the database :return: Data for the selected item :rtype: tuple @@ -1911,14 +2426,15 @@ def _read_item_table(self, item): columns_entries = ('id', 'name', 'time', 'val_str', 'val_num', 'val_bool', 'changed') columns = ", ".join(columns_entries) - if isinstance(item, Item): - query = f"SELECT {columns} FROM item WHERE name = '{str(item.id())}'" - return self._fetchone(query) + if item_id is None and item_path is None: + return - elif isinstance(item, str) and item.isdigit(): - item = int(item) - query = f"SELECT {columns} FROM item WHERE id = {item}" - return self._fetchone(query) + if item_id: + query = f"SELECT {columns} FROM item WHERE id = {item_id}" + else: + query = f"SELECT {columns} FROM item WHERE name = '{item_path}'" + + return self._fetchone(query) def _get_db_version(self) -> str: """ @@ -1928,7 +2444,7 @@ def _get_db_version(self) -> str: query = 'SELECT sqlite_version()' if self.db_driver.lower() == 'sqlite3' else 'SELECT VERSION()' return self._fetchone(query)[0] - def 
_get_db_connect_timeout(self) -> str: + def _get_db_connect_timeout(self) -> list: """ Query database timeout """ @@ -1936,7 +2452,7 @@ def _get_db_connect_timeout(self) -> str: query = "SHOW GLOBAL VARIABLES LIKE 'connect_timeout'" return self._fetchone(query) - def _get_db_net_read_timeout(self) -> str: + def _get_db_net_read_timeout(self) -> list: """ Query database timeout net_read_timeout """ @@ -1945,29 +2461,28 @@ def _get_db_net_read_timeout(self) -> str: return self._fetchone(query) ############################## - # Database specific stuff + # Database Queries ############################## - def _execute(self, query: str, params: dict = None, cur=None): + def _execute(self, query: str, params: dict = None, cur=None) -> list: if params is None: params = {} return self._query(self._db.execute, query, params, cur) - def _fetchone(self, query: str, params: dict = None, cur=None): + def _fetchone(self, query: str, params: dict = None, cur=None) -> list: if params is None: params = {} return self._query(self._db.fetchone, query, params, cur) - def _fetchall(self, query: str, params: dict = None, cur=None): + def _fetchall(self, query: str, params: dict = None, cur=None) -> list: if params is None: params = {} - tuples = self._query(self._db.fetchall, query, params, cur) - return None if tuples is None else list(tuples) + return self._query(self._db.fetchall, query, params, cur) - def _query(self, fetch, query: str, params: dict = None, cur=None): + def _query(self, fetch, query: str, params: dict = None, cur=None) -> Union[None, list]: if params is None: params = {} @@ -2001,12 +2516,13 @@ def _query(self, fetch, query: str, params: dict = None, cur=None): ############################## -# Helper functions +# Helper functions ############################## def params_to_dict(string: str) -> Union[dict, None]: - """ Parse a string with named arguments and comma separation to dict; (e.g. 
string = 'year=2022, month=12') + """ + Parse a string with named arguments and comma separation to dict; (e.g. string = 'year=2022, month=12') """ try: @@ -2082,13 +2598,14 @@ def convert_timeframe(timeframe: str) -> str: 'jahr': 'year', 'vorjahreszeitraum': 'day', 'jahreszeitraum': 'day', + 'h': 'hour', 'd': 'day', 'w': 'week', 'm': 'month', 'y': 'year' } - return convertion.get(timeframe, None) + return convertion.get(timeframe) def convert_duration(timeframe: str, window_dur: str) -> int: @@ -2273,602 +2790,50 @@ def datetime_to_timestamp(dt: datetime) -> int: return int(dt.replace(tzinfo=datetime.timezone.utc).timestamp()) -def check_substring_in_str(lookfor: Union[str, list], target: str) -> bool: - for entry in lookfor: - if isinstance(entry, str): - if entry in target: - return True - elif isinstance(entry, list): - result = True - for element in entry: - result = result and element in target # einmal False setzt alles auf False - if result: - return True - return False - - -def onchange_attribute(db_addon_fct) -> bool: - """ - Return True if attribute indicates Item to be calculated on-change - - ONCHANGE_ATTRIBUTES = ['verbrauch_heute', 'verbrauch_woche', 'verbrauch_monat', 'verbrauch_jahr', - 'minmax_heute_min', 'minmax_heute_max', - 'minmax_woche_min', 'minmax_woche_max', - 'minmax_monat_min', 'minmax_monat_max', - 'minmax_jahr_min', 'minmax_jahr_max'] - - """ - return True if not any(substring in db_addon_fct for substring in ['minus', 'serie', 'last']) else False - - -def daily_attribute(db_addon_fct) -> bool: - """ - Return True if attribute indicates Item to be calculated daily" - """ - return True if check_substring_in_str(['heute_minus', 'last_', 'jahreszeitraum', ['serie', 'tag'], ['serie', 'stunde']], db_addon_fct) else False - - -def weekly_attribute(db_addon_fct) -> bool: - """ - Return True if attribute indicates Item to be calculated weekly" - """ - return True if check_substring_in_str(['woche_minus', ['serie', 'woche']], db_addon_fct) else 
False - - -def monthly_attribute(db_addon_fct) -> bool: - """ - Return True if attribute indicates Item to be calculated daily" - """ - return True if check_substring_in_str(['monat_minus', ['serie', 'monat']], db_addon_fct) else False - +def to_int(arg) -> Union[int, None]: + try: + return int(arg) + except (ValueError, TypeError): + return None -def yearly_attribute(db_addon_fct) -> bool: - """ - Return True if attribute indicates Item to be calculated yearly" - """ - return True if check_substring_in_str(['jahr_minus', ['serie', 'jahr']], db_addon_fct) else False - - -STD_REQUEST_DICT = { - 'serie_minmax_monat_min_15m': {'func': 'min', 'timeframe': 'month', 'start': 15, 'end': 0, 'group': 'month'}, - 'serie_minmax_monat_max_15m': {'func': 'max', 'timeframe': 'month', 'start': 15, 'end': 0, 'group': 'month'}, - 'serie_minmax_monat_avg_15m': {'func': 'avg', 'timeframe': 'month', 'start': 15, 'end': 0, 'group': 'month'}, - 'serie_minmax_woche_min_30w': {'func': 'min', 'timeframe': 'week', 'start': 30, 'end': 0, 'group': 'week'}, - 'serie_minmax_woche_max_30w': {'func': 'max', 'timeframe': 'week', 'start': 30, 'end': 0, 'group': 'week'}, - 'serie_minmax_woche_avg_30w': {'func': 'avg', 'timeframe': 'week', 'start': 30, 'end': 0, 'group': 'week'}, - 'serie_minmax_tag_min_30d': {'func': 'min', 'timeframe': 'day', 'start': 30, 'end': 0, 'group': 'day'}, - 'serie_minmax_tag_max_30d': {'func': 'max', 'timeframe': 'day', 'start': 30, 'end': 0, 'group': 'day'}, - 'serie_minmax_tag_avg_30d': {'func': 'avg', 'timeframe': 'day', 'start': 30, 'end': 0, 'group': 'day'}, - 'serie_verbrauch_tag_30d': {'func': 'diff_max', 'timeframe': 'day', 'start': 30, 'end': 0, 'group': 'day'}, - 'serie_verbrauch_woche_30w': {'func': 'diff_max', 'timeframe': 'week', 'start': 30, 'end': 0, 'group': 'week'}, - 'serie_verbrauch_monat_18m': {'func': 'diff_max', 'timeframe': 'month', 'start': 18, 'end': 0, 'group': 'month'}, - 'serie_zaehlerstand_tag_30d': {'func': 'max', 'timeframe': 'day', 'start': 
30, 'end': 0, 'group': 'day'}, - 'serie_zaehlerstand_woche_30w': {'func': 'max', 'timeframe': 'week', 'start': 30, 'end': 0, 'group': 'week'}, - 'serie_zaehlerstand_monat_18m': {'func': 'max', 'timeframe': 'month', 'start': 18, 'end': 0, 'group': 'month'}, - 'serie_waermesumme_monat_24m': {'func': 'sum_max', 'timeframe': 'month', 'start': 24, 'end': 0, 'group': 'day', 'group2': 'month'}, - 'serie_kaeltesumme_monat_24m': {'func': 'sum_max', 'timeframe': 'month', 'start': 24, 'end': 0, 'group': 'day', 'group2': 'month'}, - 'serie_tagesmittelwert': {'func': 'max', 'timeframe': 'year', 'start': 0, 'end': 0, 'group': 'day'}, - 'serie_tagesmittelwert_stunde_0d': {'func': 'avg1', 'timeframe': 'day', 'start': 0, 'end': 0, 'group': 'hour', 'group2': 'day'}, - 'serie_tagesmittelwert_tag_stunde_30d': {'func': 'avg1', 'timeframe': 'day', 'start': 30, 'end': 0, 'group': 'hour', 'group2': 'day'}, - 'waermesumme_year_month': {'func': 'sum_max', 'timeframe': 'day', 'start': None, 'end': None, 'group': 'day', 'group2': None}, - 'kaltesumme_year_month': {'func': 'sum_min_neg', 'timeframe': 'day', 'start': None, 'end': None, 'group': 'day', 'group2': None}, - 'gts': {'func': 'max', 'timeframe': 'year', 'start': None, 'end': None, 'group': 'day'}, - } -############################## -# Backup -############################## -# -# def _delta_value(self, item, time_str_1, time_str_2): -# """ Computes a difference of values on 2 points in time for an item -# -# :param item: Item, for which query should be done -# :param time_str_1: time sting as per database-Plugin for newer point in time (e.g.: 200i) -# :param time_str_2: Zeitstring gemäß database-Plugin for older point in time(e.g.: 400i) -# """ -# -# time_since_oldest_log = self._time_since_oldest_log(item) -# end = int(time_str_1[0:len(time_str_1) - 1]) -# -# if time_since_oldest_log > end: -# # self.logger.debug(f'_delta_value: fetch DB with {item.id()}.db(max, {time_str_1}, {time_str_1})') -# value_1 = 
self._db_plugin._single('max', time_str_1, time_str_1, item.id()) -# -# # self.logger.debug(f'_delta_value: fetch DB with {item.id()}.db(max, {time_str_2}, {time_str_2})') -# value_2 = self._db_plugin._single('max', time_str_2, time_str_2, item.id()) -# -# if value_1 is not None: -# if value_2 is None: -# self.logger.info(f'No entries for Item {item.id()} in DB found for requested enddate {time_str_1}; try to use oldest entry instead') -# value_2 = self._get_oldest_value(item) -# if value_2 is not None: -# value = round(value_1 - value_2, 2) -# # self.logger.debug(f'_delta_value for item={item.id()} with time_str_1={time_str_1} and time_str_2={time_str_2} is {value}') -# return value -# else: -# self.logger.info(f'_delta_value for item={item.id()} using time_str_1={time_str_1} is older as oldest_entry. Therefore no DB request initiated.') -# -# def _single_value(self, item, time_str_1, func='max'): -# """ Gets value at given point im time from database -# -# :param item: item, for which query should be done -# :param time_str_1: time sting as per database-Plugin for point in time (e.g.: 200i) -# :param func: function of database plugin -# """ -# -# # value = None -# # value = item.db(func, time_str_1, time_str_1) -# value = self._db_plugin._single(func, time_str_1, time_str_1, item.id()) -# if value is None: -# self.logger.info(f'No entries for Item {item.id()} in DB found for requested end {time_str_1}; try to use oldest entry instead') -# value = int(self._get_oldest_value(item)) -# # self.logger.debug(f'_single_value for item={item.id()} with time_str_1={time_str_1} is {value}') -# return value -# -# def _connect_to_db(self, host=None, user=None, password=None, db=None): -# """ Connect to DB via pymysql -# """ -# -# if not host: -# host = self.connection_data[0].split(':', 1)[1] -# if not user: -# user = self.connection_data[1].split(':', 1)[1] -# if not password: -# password = self.connection_data[2].split(':', 1)[1] -# if not db: -# db = 
self.connection_data[3].split(':', 1)[1] -# port = self.connection_data[4].split(':', 1)[1] -# -# try: -# connection = pymysql.connect(host=host, user=user, password=password, db=db, charset='utf8mb4', cursorclass=pymysql.cursors.DictCursor) -# except Exception as e: -# self.logger.error(f"Connection to Database failed with error {e}!.") -# return -# else: -# return connection -# -# -# def _get_itemid_via_db_plugin(self, item): -# """ Get item_id of item out of dict or request it from db via database plugin and put it into dict -# """ -# -# # self.logger.debug(f"_get_itemid called for item={item}") -# -# _item_id = self.itemid_dict.get(item, None) -# if _item_id is None: -# _item_id = self._db_plugin.id(item) -# self.itemid_dict[item] = _item_id -# -# return _item_id -# -# def _get_time_strs(self, key, x): -# """ Create timestrings for database query depending in key with -# -# :param key: key for getting the time strings -# :param x: time difference as increment -# :return: tuple of timestrings (timestr closer to now, timestr more in the past) -# -# """ -# -# self.logger.debug(f"_get_time_strs called with key={key}, x={x}") -# -# if key == 'heute': -# _time_str_1 = self._time_str_heute_minus_x(x - 1) -# _time_str_2 = self._time_str_heute_minus_x(x) -# elif key == 'woche': -# _time_str_1 = self._time_str_woche_minus_x(x - 1) -# _time_str_2 = self._time_str_woche_minus_x(x) -# elif key == 'monat': -# _time_str_1 = self._time_str_monat_minus_x(x - 1) -# _time_str_2 = self._time_str_monat_minus_x(x) -# elif key == 'jahr': -# _time_str_1 = self._time_str_jahr_minus_x(x - 1) -# _time_str_2 = self._time_str_jahr_minus_x(x) -# elif key == 'vorjahreszeitraum': -# _time_str_1 = self._time_str_heute_minus_jahre_x(x + 1) -# _time_str_2 = self._time_str_jahr_minus_x(x+1) -# else: -# _time_str_1 = None -# _time_str_2 = None -# -# # self.logger.debug(f"_time_str_1={_time_str_1}, _time_str_2={_time_str_2}") -# return _time_str_1, _time_str_2 -# -# def 
_time_str_heute_minus_x(self, x=0): -# """ Creates an str for db request in min from time since beginning of today""" -# return f"{self.shtime.time_since(self.shtime.today(-x), 'im')}i" -# -# def _time_str_woche_minus_x(self, x=0): -# """ Creates an str for db request in min from time since beginning of week""" -# return f"{self.shtime.time_since(self.shtime.beginning_of_week(self.shtime.calendar_week(), None, -x), 'im')}i" -# -# def _time_str_monat_minus_x(self, x=0): -# """ Creates an str for db request in min for time since beginning of month""" -# return f"{self.shtime.time_since(self.shtime.beginning_of_month(None, None, -x), 'im')}i" -# -# def _time_str_jahr_minus_x(self, x=0): -# """ Creates an str for db request in min for time since beginning of year""" -# return f"{self.shtime.time_since(self.shtime.beginning_of_year(None, -x), 'im')}i" -# -# def _time_str_heute_minus_jahre_x(self, x=0): -# """ Creates an str for db request in min for time since now x years ago""" -# return f"{self.shtime.time_since(self.shtime.now() + relativedelta(years=-x), 'im')}i" -# -# def _time_since_oldest_log(self, item): -# """ Ermittlung der Zeit in ganzen Minuten zwischen "now" und dem ältesten Eintrag eines Items in der DB -# -# :param item: Item, for which query should be done -# :return: time in minutes from oldest entry to now -# """ -# -# _timestamp = self._get_oldest_log(item) -# _oldest_log_dt = datetime.datetime.fromtimestamp(int(_timestamp) / 1000, -# datetime.timezone.utc).astimezone().strftime( -# '%Y-%m-%d %H:%M:%S %Z%z') -# return self.shtime.time_since(_oldest_log_dt, resulttype='im') -# -# @staticmethod -# def _get_dbtimestamp_from_date(date): -# """ Compute a timestamp for database entry from given date -# -# :param date: datetime object / string of format 'yyyy-mm' -# """ -# -# d = None -# if isinstance(date, datetime.date): -# d = date -# elif isinstance(date, str): -# date = date.split('-') -# if len(date) == 2: -# year = int(date[0]) -# month = int(date[1]) 
-# if (1980 <= year <= datetime.date.today().year) and (1 <= month <= 12): -# d = datetime.date(year, month, 1) -# -# if d: -# return int(time.mktime(d.timetuple()) * 1000) -# -# def fetch_min_monthly_count(sh, item, count=None): -# _logger.warning(f"Die Userfunction 'fetch_min_monthly_count' wurde aufgerufen mit item {item} and count {count}") -# -# if type(item) is str: -# item = get_item_id(item) -# if count is None: -# # query = f"SELECT CONCAT(YEAR(FROM_UNIXTIME(time/1000)), '-', LPAD(MONTH(FROM_UNIXTIME(time/1000)), 2, '0')) AS Date, MIN(val_num) FROM log WHERE item_id = {item} GROUP BY Date ORDER BY Date ASC" -# query = f"SELECT time, MIN(val_num) FROM log WHERE item_id = {item} GROUP BY YEAR(FROM_UNIXTIME(time/1000)), MONTH(FROM_UNIXTIME(time/1000)) ORDER BY time ASC" -# else: -# # query = f"SELECT CONCAT(YEAR(FROM_UNIXTIME(time/1000)), '-', LPAD(MONTH(FROM_UNIXTIME(time/1000)), 2, '0')) AS Date, MIN(val_num) FROM log WHERE item_id = {item} AND DATE(FROM_UNIXTIME(time/1000)) > DATE_SUB(now(), INTERVAL {count} MONTH) GROUP BY Date ORDER BY Date ASC" -# query = f"SELECT time, MIN(val_num) FROM log WHERE item_id = {item} AND DATE(FROM_UNIXTIME(time/1000)) > DATE_SUB(DATE_FORMAT(NOW() ,'%Y-%m-01'), INTERVAL {count} MONTH) GROUP BY YEAR(FROM_UNIXTIME(time/1000)), MONTH(FROM_UNIXTIME(time/1000)) ORDER BY time ASC" -# -# result = [] -# try: -# connection = connect_db(sh) -# with connection.cursor() as cursor: -# cursor.execute(query) -# result = cursor.fetchall() -# finally: -# connection.close() -# -# value_list = [] -# for element in result: -# value_list.append([element['time'], element['MIN(val_num)']]) -# -# _logger.warning(f'mysql.fetch_min_monthly_count value_list: {value_list}') -# return value_list -# -# def fetch_max_monthly_count(sh, item, count=None): -# _logger.warning(f"Die Userfunction 'fetch_max_monthly_count' wurde aufgerufen mit item {item} and count {count}") -# -# if type(item) is str: -# item = get_item_id(item) -# if count is None: -# # query 
= f"SELECT CONCAT(YEAR(FROM_UNIXTIME(time/1000)), '-', LPAD(MONTH(FROM_UNIXTIME(time/1000)), 2, '0')) AS Date, MAX(val_num) FROM log WHERE item_id = {item} GROUP BY Date ORDER BY Date ASC" -# query = f"SELECT time, MAX(val_num) FROM log WHERE item_id = {item} GROUP BY YEAR(FROM_UNIXTIME(time/1000)), MONTH(FROM_UNIXTIME(time/1000)) ORDER BY time ASC" -# else: -# # query = f"SELECT CONCAT(YEAR(FROM_UNIXTIME(time/1000)), '-', LPAD(MONTH(FROM_UNIXTIME(time/1000)), 2, '0')) AS Date, MAX(val_num) FROM log WHERE item_id = {item} AND DATE(FROM_UNIXTIME(time/1000)) > DATE_SUB(now(), INTERVAL {count} MONTH) GROUP BY Date ORDER BY Date ASC" -# query = f"SELECT time, MAX(val_num), DATE(FROM_UNIXTIME(time/1000)) as DATE FROM log WHERE item_id = {item} AND DATE(FROM_UNIXTIME(time/1000)) > DATE_SUB(DATE_FORMAT(NOW() ,'%Y-%m-01'), INTERVAL {count} MONTH) GROUP BY YEAR(FROM_UNIXTIME(time/1000)), MONTH(FROM_UNIXTIME(time/1000)) ORDER BY time ASC" -# -# result = [] -# try: -# connection = connect_db(sh) -# with connection.cursor() as cursor: -# cursor.execute(query) -# result = cursor.fetchall() -# finally: -# connection.close() -# -# _logger.warning(f'mysql.fetch_max_monthly_count result: {result}') -# -# value_list = [] -# for element in result: -# value_list.append([element['time'], element['MAX(val_num)']]) -# -# _logger.warning(f'mysql.fetch_max_monthly_count value_list: {value_list}') -# return value_list -# -# def fetch_avg_monthly_count(sh, item, count=None): -# _logger.warning(f"Die Userfunction 'fetch_avg_monthly_count' wurde aufgerufen mit item {item} and count {count}") -# -# if type(item) is str: -# item = get_item_id(item) -# if count is None: -# query = f"SELECT time, ROUND(AVG(val_num * duration) / AVG(duration),2) as AVG FROM log WHERE item_id = {item} GROUP BY YEAR(FROM_UNIXTIME(time/1000)), MONTH(FROM_UNIXTIME(time/1000)) ORDER BY time ASC" -# else: -# query = f"SELECT time, ROUND(AVG(val_num * duration) / AVG(duration),2) as AVG FROM log WHERE item_id = {item} AND 
DATE(FROM_UNIXTIME(time/1000)) > DATE_SUB(DATE_FORMAT(NOW() ,'%Y-%m-01'), INTERVAL {count} MONTH) GROUP BY YEAR(FROM_UNIXTIME(time/1000)), MONTH(FROM_UNIXTIME(time/1000)) ORDER BY time ASC" -# -# result = [] -# try: -# connection = connect_db(sh) -# with connection.cursor() as cursor: -# cursor.execute(query) -# result = cursor.fetchall() -# finally: -# connection.close() -# -# value_list = [] -# for element in result: -# value_list.append([element['time'], element['AVG']]) -# -# _logger.warning(f'mysql.fetch_avg_monthly_count value_list: {value_list}') -# return value_list -# -# def fetch_min_max_monthly_count(sh, item, count=None): -# _logger.warning(f"Die Userfunction 'fetch_min_max_monthly_count' wurde aufgerufen mit item {item} and count {count}") -# -# if type(item) is str: -# item = get_item_id(item) -# if count is None: -# query = f"SELECT CONCAT(YEAR(FROM_UNIXTIME(time/1000)), '-', LPAD(MONTH(FROM_UNIXTIME(time/1000)), 2, '0')) AS Date, MAX(val_num), MIN(val_num) FROM log WHERE item_id = {item} GROUP BY Date ORDER BY Date DESC" -# else: -# query = f"SELECT CONCAT(YEAR(FROM_UNIXTIME(time/1000)), '-', LPAD(MONTH(FROM_UNIXTIME(time/1000)), 2, '0')) AS Date, MAX(val_num), MIN(val_num) FROM log WHERE item_id = {item} AND DATE(FROM_UNIXTIME(time/1000)) > DATE_SUB(now(), INTERVAL {count} MONTH) GROUP BY Date ORDER BY Date DESC" -# -# result = [] -# try: -# connection = connect_db(sh) -# with connection.cursor() as cursor: -# cursor.execute(query) -# result = cursor.fetchall() -# finally: -# connection.close() -# _logger.warning(f'mysql result: {result}') -# return result -# -# def fetch_min_max_monthly_year(sh, item, year=None): -# _logger.warning(f"Die Userfunction 'fetch_min_max_monthly_year' wurde aufgerufen mit item {item} and year {year}") -# -# if type(item) is str: -# item = get_item_id(item) -# if year is None: -# year = datetime.now().year -# -# query = f"SELECT CONCAT(YEAR(FROM_UNIXTIME(time/1000)), '-', LPAD(MONTH(FROM_UNIXTIME(time/1000)), 2, '0')) AS 
Date, MAX(val_num), MIN(val_num) FROM log WHERE item_id = {item} AND YEAR(FROM_UNIXTIME(time/1000)) = {year} GROUP BY Date ORDER BY Date DESC" -# result = [] -# try: -# connection = connect_db(sh) -# with connection.cursor() as cursor: -# cursor.execute(query) -# result = cursor.fetchall() -# finally: -# connection.close() -# _logger.warning(f'mysql result: {result}') -# return result -# -# def fetch_min_weekly_count(sh, item, count=None): -# _logger.warning(f"Die Userfunction 'fetch_min_weekly_count' wurde aufgerufen mit item {item} and count {count}") -# -# if type(item) is str: -# item = get_item_id(item) -# if count is None: -# count = 51 -# query = f"SELECT time, MIN(val_num), DATE(FROM_UNIXTIME(time/1000)) as DATE FROM log WHERE item_id = {item} AND DATE(FROM_UNIXTIME(time/1000)) > DATE_SUB(DATE_ADD(CURDATE(), INTERVAL - WEEKDAY(CURDATE()) DAY), INTERVAL {count} WEEK) GROUP BY YEAR(FROM_UNIXTIME(time/1000)), WEEK(FROM_UNIXTIME(time/1000)) ORDER BY time ASC" -# result = [] -# try: -# connection = connect_db(sh) -# with connection.cursor() as cursor: -# cursor.execute(query) -# result = cursor.fetchall() -# finally: -# connection.close() -# -# value_list = [] -# for element in result: -# value_list.append([element['time'], element['MIN(val_num)']]) -# -# _logger.warning(f'mysql.fetch_min_weekly_count value_list: {value_list}') -# return value_list -# -# def fetch_max_weekly_count(sh, item, count=None): -# _logger.warning(f"Die Userfunction 'fetch_max_weekly_count' wurde aufgerufen mit item {item} and count {count}") -# -# if type(item) is str: -# item = get_item_id(item) -# if count is None: -# count = 51 -# query = f"SELECT time, MAX(val_num) FROM log WHERE item_id = {item} AND DATE(FROM_UNIXTIME(time/1000)) > DATE_SUB(DATE_ADD(CURDATE(), INTERVAL - WEEKDAY(CURDATE()) DAY), INTERVAL {count} WEEK) GROUP BY YEAR(FROM_UNIXTIME(time/1000)), WEEK(FROM_UNIXTIME(time/1000)) ORDER BY time ASC" -# result = [] -# try: -# connection = connect_db(sh) -# with 
connection.cursor() as cursor: -# cursor.execute(query) -# result = cursor.fetchall() -# finally: -# connection.close() -# -# value_list = [] -# for element in result: -# value_list.append([element['time'], element['MAX(val_num)']]) -# -# _logger.warning(f'mysql.fetch_max_weekly_count value_list: {value_list}') -# return value_list -# -# def fetch_avg_weekly_count(sh, item, count=None): -# _logger.warning(f"Die Userfunction 'fetch_avg_weekly_count' wurde aufgerufen mit item {item} and count {count}") -# -# if type(item) is str: -# item = get_item_id(item) -# if count is None: -# count = 51 -# query = f"SELECT time, ROUND(AVG(val_num * duration) / AVG(duration),2) as AVG FROM log WHERE item_id = {item} AND DATE(FROM_UNIXTIME(time/1000)) > DATE_SUB(DATE_ADD(CURDATE(), INTERVAL - WEEKDAY(CURDATE()) DAY), INTERVAL {count} WEEK) GROUP BY YEAR(FROM_UNIXTIME(time/1000)), WEEK(FROM_UNIXTIME(time/1000)) ORDER BY time ASC" -# result = [] -# try: -# connection = connect_db(sh) -# with connection.cursor() as cursor: -# cursor.execute(query) -# result = cursor.fetchall() -# finally: -# connection.close() -# -# value_list = [] -# for element in result: -# value_list.append([element['time'], element['AVG']]) -# -# _logger.warning(f'mysql.fetch_avg_weekly_count value_list: {value_list}') -# return value_list -# -# def fetch_min_max_weekly_count(sh, item, count=None): -# _logger.warning(f"Die Userfunction 'fetch_min_max_weekly_count' wurde aufgerufen mit item {item} and count {count}") -# -# if type(item) is str: -# item = get_item_id(item) -# if count is None: -# count = 51 -# query = f"SELECT time, MAX(val_num), MIN(val_num), DATE(FROM_UNIXTIME(time/1000)) as DATE FROM log WHERE item_id = {item} AND DATE(FROM_UNIXTIME(time/1000)) > DATE_SUB(DATE_ADD(CURDATE(), INTERVAL - WEEKDAY(CURDATE()) DAY), INTERVAL {count} WEEK) GROUP BY YEAR(FROM_UNIXTIME(time/1000)), WEEK(FROM_UNIXTIME(time/1000)) ORDER BY time ASC" -# result = [] -# try: -# connection = connect_db(sh) -# with 
connection.cursor() as cursor: -# cursor.execute(query) -# result = cursor.fetchall() -# finally: -# connection.close() -# _logger.warning(f'mysql result: {result}') -# return result -# -# def fetch_min_max_weekly_year(sh, item, year=None): -# _logger.warning(f"Die Userfunction 'fetch_min_max_weekly_year' wurde aufgerufen mit item {item} and year {year}") -# -# if type(item) is str: -# item = get_item_id(item) -# if year is None: -# year = datetime.now().year -# -# query = f"SELECT CONCAT(YEAR(FROM_UNIXTIME(time/1000)), '/', LPAD(WEEK(FROM_UNIXTIME(time/1000)), 2, '0')) AS Date, MAX(val_num), MIN(val_num) FROM log WHERE item_id = {item} AND YEAR(FROM_UNIXTIME(time/1000)) = {year} GROUP BY Date ORDER BY Date DESC" -# result = [] -# try: -# connection = connect_db(sh) -# with connection.cursor() as cursor: -# cursor.execute(query) -# result = cursor.fetchall() -# finally: -# connection.close() -# _logger.warning(f'mysql result: {result}') -# return result -# -# def fetch_min_daily_count(sh, item, count=None): -# _logger.warning(f"Die Userfunction 'fetch_min_daily_count' wurde aufgerufen mit item {item} as type {type(item)} and count {count}") -# -# if type(item) is str: -# item = get_item_id(item) -# if count is None: -# count = 30 -# -# query = f"SELECT time, MIN(val_num) FROM log WHERE item_id = {item} AND DATE(FROM_UNIXTIME(time/1000)) > DATE_SUB(now(), INTERVAL {count} DAY) GROUP BY DATE(FROM_UNIXTIME(time/1000)) ORDER BY time ASC" -# result = [] -# try: -# connection = connect_db(sh) -# with connection.cursor() as cursor: -# cursor.execute(query) -# result = cursor.fetchall() -# finally: -# connection.close() -# -# value_list = [] -# for element in result: -# value_list.append([element['time'], element['MIN(val_num)']]) -# -# _logger.warning(f'mysql.fetch_min_daily_count value_list: {value_list}') -# return value_list -# -# def fetch_max_daily_count(sh, item, count=None): -# _logger.warning(f"Die Userfunction 'fetch_max_daily_count' wurde aufgerufen mit item 
{item} as type {type(item)} and count {count}") -# -# if type(item) is str: -# item = get_item_id(item) -# if count is None: -# count = 30 -# -# query = f"SELECT time, MAX(val_num) FROM log WHERE item_id = {item} AND DATE(FROM_UNIXTIME(time/1000)) > DATE_SUB(now(), INTERVAL {count} DAY) GROUP BY DATE(FROM_UNIXTIME(time/1000)) ORDER BY time ASC" -# result = [] -# try: -# connection = connect_db(sh) -# with connection.cursor() as cursor: -# cursor.execute(query) -# result = cursor.fetchall() -# finally: -# connection.close() -# -# -# value_list = [] -# for element in result: -# value_list.append([element['time'], element['MAX(val_num)']]) -# -# _logger.warning(f'mysql.fetch_max_daily_count value_list: {value_list}') -# return value_list -# -# def fetch_min_max_daily_count(sh, item, count=None): -# _logger.warning(f"Die Userfunction 'fetch_min_max_daily_count' wurde aufgerufen mit item {item} as type {type(item)} and count {count}") -# -# if type(item) is str: -# item = get_item_id(item) -# if count is None: -# count = 30 -# -# query = f"SELECT DATE(FROM_UNIXTIME(time/1000)) AS Date, MAX(val_num), MIN(val_num) FROM log WHERE item_id = {item} AND DATE(FROM_UNIXTIME(time/1000)) > DATE_SUB(now(), INTERVAL {count} DAY) GROUP BY Date ORDER BY Date DESC" -# result = [] -# try: -# connection = connect_db(sh) -# with connection.cursor() as cursor: -# cursor.execute(query) -# result = cursor.fetchall() -# finally: -# connection.close() -# _logger.warning(f'mysql result: {result}') -# return result -# -# def fetch_min_max_daily_year(sh, item, year=None): -# _logger.warning(f"Die Userfunction 'fetch_min_max_daily_year' wurde aufgerufen mit item {item} and year {year}") -# -# if type(item) is str: -# item = get_item_id(item) -# if year is None: -# year = datetime.now().year -# -# query = f"SELECT DATE(FROM_UNIXTIME(time/1000)) AS Date, MAX(val_num), MIN(val_num) FROM log WHERE item_id = {item} AND YEAR(FROM_UNIXTIME(time/1000)) = {year} GROUP BY Date ORDER BY Date DESC" -# result 
= [] -# try: -# connection = connect_db(sh) -# with connection.cursor() as cursor: -# cursor.execute(query) -# result = cursor.fetchall() -# finally: -# connection.close() -# _logger.warning(f'mysql result: {result}') -# return result -# -# def _fetch_query(self, query): -# -# self.logger.debug(f"'_fetch_query' has been called with query={query}") -# connection = self._connect_to_db() -# if connection: -# try: -# connection = connect_db(sh) -# with connection.cursor() as cursor: -# cursor.execute(query) -# result = cursor.fetchall() -# except Exception as e: -# self.logger.error(f"_fetch_query failed with error={e}") -# else: -# self.logger.debug(f'_fetch_query result={result}') -# return result -# finally: -# connection.close() +ALLOWED_QUERY_TIMEFRAMES = ['year', 'month', 'week', 'day', 'hour'] +ALLOWED_MINMAX_FUNCS = ['min', 'max', 'avg'] +ALL_ONCHANGE_ATTRIBUTES = ['verbrauch_heute', 'verbrauch_woche', 'verbrauch_monat', 'verbrauch_jahr', 'minmax_heute_min', 'minmax_heute_max', 'minmax_woche_min', 'minmax_woche_max', 'minmax_monat_min', 'minmax_monat_max', 'minmax_jahr_min', 'minmax_jahr_max', 'tagesmitteltemperatur_heute'] +ALL_DAILY_ATTRIBUTES = ['verbrauch_heute_minus1', 'verbrauch_heute_minus2', 'verbrauch_heute_minus3', 'verbrauch_heute_minus4', 'verbrauch_heute_minus5', 'verbrauch_heute_minus6', 'verbrauch_heute_minus7', 'verbrauch_rolling_12m_heute_minus1', 'verbrauch_jahreszeitraum_minus1', 'verbrauch_jahreszeitraum_minus2', 'verbrauch_jahreszeitraum_minus3', 'zaehlerstand_heute_minus1', 'zaehlerstand_heute_minus2', 'zaehlerstand_heute_minus3', 'minmax_last_24h_min', 'minmax_last_24h_max', 'minmax_last_24h_avg', 'minmax_last_7d_min', 'minmax_last_7d_max', 'minmax_last_7d_avg', 'minmax_heute_minus1_min', 'minmax_heute_minus1_max', 'minmax_heute_minus1_avg', 'minmax_heute_minus2_min', 'minmax_heute_minus2_max', 'minmax_heute_minus2_avg', 'minmax_heute_minus3_min', 'minmax_heute_minus3_max', 'minmax_heute_minus3_avg', 'tagesmitteltemperatur_heute_minus1', 
'tagesmitteltemperatur_heute_minus2', 'tagesmitteltemperatur_heute_minus3', 'serie_minmax_tag_min_30d', 'serie_minmax_tag_max_30d', 'serie_minmax_tag_avg_30d', 'serie_verbrauch_tag_30d', 'serie_zaehlerstand_tag_30d', 'serie_tagesmittelwert_stunde_0d', 'serie_tagesmittelwert_tag_stunde_30d', 'kaeltesumme', 'waermesumme', 'gruenlandtempsumme', 'tagesmitteltemperatur', 'wachstumsgradtage'] +ALL_WEEKLY_ATTRIBUTES = ['verbrauch_woche_minus1', 'verbrauch_woche_minus2', 'verbrauch_woche_minus3', 'verbrauch_woche_minus4', 'verbrauch_rolling_12m_woche_minus1', 'zaehlerstand_woche_minus1', 'zaehlerstand_woche_minus2', 'zaehlerstand_woche_minus3', 'minmax_woche_minus1_min', 'minmax_woche_minus1_max', 'minmax_woche_minus1_avg', 'minmax_woche_minus2_min', 'minmax_woche_minus2_max', 'minmax_woche_minus2_avg', 'serie_minmax_woche_min_30w', 'serie_minmax_woche_max_30w', 'serie_minmax_woche_avg_30w', 'serie_verbrauch_woche_30w', 'serie_zaehlerstand_woche_30w'] +ALL_MONTHLY_ATTRIBUTES = ['verbrauch_monat_minus1', 'verbrauch_monat_minus2', 'verbrauch_monat_minus3', 'verbrauch_monat_minus4', 'verbrauch_monat_minus12', 'verbrauch_rolling_12m_monat_minus1', 'zaehlerstand_monat_minus1', 'zaehlerstand_monat_minus2', 'zaehlerstand_monat_minus3', 'minmax_monat_minus1_min', 'minmax_monat_minus1_max', 'minmax_monat_minus1_avg', 'minmax_monat_minus2_min', 'minmax_monat_minus2_max', 'minmax_monat_minus2_avg', 'serie_minmax_monat_min_15m', 'serie_minmax_monat_max_15m', 'serie_minmax_monat_avg_15m', 'serie_verbrauch_monat_18m', 'serie_zaehlerstand_monat_18m', 'serie_waermesumme_monat_24m', 'serie_kaeltesumme_monat_24m'] +ALL_YEARLY_ATTRIBUTES = ['verbrauch_jahr_minus1', 'verbrauch_jahr_minus2', 'verbrauch_rolling_12m_jahr_minus1', 'zaehlerstand_jahr_minus1', 'zaehlerstand_jahr_minus2', 'zaehlerstand_jahr_minus3', 'minmax_jahr_minus1_min', 'minmax_jahr_minus1_max', 'minmax_jahr_minus1_avg'] +ALL_NEED_PARAMS_ATTRIBUTES = ['kaeltesumme', 'waermesumme', 'gruenlandtempsumme', 'tagesmitteltemperatur', 
'wachstumsgradtage', 'db_request'] +ALL_VERBRAUCH_ATTRIBUTES = ['verbrauch_heute', 'verbrauch_woche', 'verbrauch_monat', 'verbrauch_jahr', 'verbrauch_heute_minus1', 'verbrauch_heute_minus2', 'verbrauch_heute_minus3', 'verbrauch_heute_minus4', 'verbrauch_heute_minus5', 'verbrauch_heute_minus6', 'verbrauch_heute_minus7', 'verbrauch_woche_minus1', 'verbrauch_woche_minus2', 'verbrauch_woche_minus3', 'verbrauch_woche_minus4', 'verbrauch_monat_minus1', 'verbrauch_monat_minus2', 'verbrauch_monat_minus3', 'verbrauch_monat_minus4', 'verbrauch_monat_minus12', 'verbrauch_jahr_minus1', 'verbrauch_jahr_minus2', 'verbrauch_rolling_12m_heute_minus1', 'verbrauch_rolling_12m_woche_minus1', 'verbrauch_rolling_12m_monat_minus1', 'verbrauch_rolling_12m_jahr_minus1', 'verbrauch_jahreszeitraum_minus1', 'verbrauch_jahreszeitraum_minus2', 'verbrauch_jahreszeitraum_minus3'] +ALL_ZAEHLERSTAND_ATTRIBUTES = ['zaehlerstand_heute_minus1', 'zaehlerstand_heute_minus2', 'zaehlerstand_heute_minus3', 'zaehlerstand_woche_minus1', 'zaehlerstand_woche_minus2', 'zaehlerstand_woche_minus3', 'zaehlerstand_monat_minus1', 'zaehlerstand_monat_minus2', 'zaehlerstand_monat_minus3', 'zaehlerstand_jahr_minus1', 'zaehlerstand_jahr_minus2', 'zaehlerstand_jahr_minus3'] +ALL_HISTORIE_ATTRIBUTES = ['minmax_last_24h_min', 'minmax_last_24h_max', 'minmax_last_24h_avg', 'minmax_last_7d_min', 'minmax_last_7d_max', 'minmax_last_7d_avg', 'minmax_heute_min', 'minmax_heute_max', 'minmax_heute_minus1_min', 'minmax_heute_minus1_max', 'minmax_heute_minus1_avg', 'minmax_heute_minus2_min', 'minmax_heute_minus2_max', 'minmax_heute_minus2_avg', 'minmax_heute_minus3_min', 'minmax_heute_minus3_max', 'minmax_heute_minus3_avg', 'minmax_woche_min', 'minmax_woche_max', 'minmax_woche_minus1_min', 'minmax_woche_minus1_max', 'minmax_woche_minus1_avg', 'minmax_woche_minus2_min', 'minmax_woche_minus2_max', 'minmax_woche_minus2_avg', 'minmax_monat_min', 'minmax_monat_max', 'minmax_monat_minus1_min', 'minmax_monat_minus1_max', 
'minmax_monat_minus1_avg', 'minmax_monat_minus2_min', 'minmax_monat_minus2_max', 'minmax_monat_minus2_avg', 'minmax_jahr_min', 'minmax_jahr_max', 'minmax_jahr_minus1_min', 'minmax_jahr_minus1_max', 'minmax_jahr_minus1_avg'] +ALL_TAGESMITTEL_ATTRIBUTES = ['tagesmitteltemperatur_heute', 'tagesmitteltemperatur_heute_minus1', 'tagesmitteltemperatur_heute_minus2', 'tagesmitteltemperatur_heute_minus3'] +ALL_SERIE_ATTRIBUTES = ['serie_minmax_monat_min_15m', 'serie_minmax_monat_max_15m', 'serie_minmax_monat_avg_15m', 'serie_minmax_woche_min_30w', 'serie_minmax_woche_max_30w', 'serie_minmax_woche_avg_30w', 'serie_minmax_tag_min_30d', 'serie_minmax_tag_max_30d', 'serie_minmax_tag_avg_30d', 'serie_verbrauch_tag_30d', 'serie_verbrauch_woche_30w', 'serie_verbrauch_monat_18m', 'serie_zaehlerstand_tag_30d', 'serie_zaehlerstand_woche_30w', 'serie_zaehlerstand_monat_18m', 'serie_waermesumme_monat_24m', 'serie_kaeltesumme_monat_24m', 'serie_tagesmittelwert_stunde_0d', 'serie_tagesmittelwert_tag_stunde_30d'] +ALL_GEN_ATTRIBUTES = ['general_oldest_value', 'general_oldest_log'] +ALL_COMPLEX_ATTRIBUTES = ['kaeltesumme', 'waermesumme', 'gruenlandtempsumme', 'tagesmitteltemperatur', 'wachstumsgradtage', 'db_request'] + + +""" + 'serie_minmax_monat_min_15m': {'func': 'min', 'timeframe': 'month', 'start': 15, 'end': 0, 'group': 'month'}, + 'serie_minmax_monat_max_15m': {'func': 'max', 'timeframe': 'month', 'start': 15, 'end': 0, 'group': 'month'}, + 'serie_minmax_monat_avg_15m': {'func': 'avg', 'timeframe': 'month', 'start': 15, 'end': 0, 'group': 'month'}, + 'serie_minmax_woche_min_30w': {'func': 'min', 'timeframe': 'week', 'start': 30, 'end': 0, 'group': 'week'}, + 'serie_minmax_woche_max_30w': {'func': 'max', 'timeframe': 'week', 'start': 30, 'end': 0, 'group': 'week'}, + 'serie_minmax_woche_avg_30w': {'func': 'avg', 'timeframe': 'week', 'start': 30, 'end': 0, 'group': 'week'}, + 'serie_minmax_tag_min_30d': {'func': 'min', 'timeframe': 'day', 'start': 30, 'end': 0, 'group': 'day'}, + 
'serie_minmax_tag_max_30d': {'func': 'max', 'timeframe': 'day', 'start': 30, 'end': 0, 'group': 'day'}, + 'serie_minmax_tag_avg_30d': {'func': 'avg', 'timeframe': 'day', 'start': 30, 'end': 0, 'group': 'day'}, + 'serie_verbrauch_tag_30d': {'func': 'diff_max', 'timeframe': 'day', 'start': 30, 'end': 0, 'group': 'day'}, + 'serie_verbrauch_woche_30w': {'func': 'diff_max', 'timeframe': 'week', 'start': 30, 'end': 0, 'group': 'week'}, + 'serie_verbrauch_monat_18m': {'func': 'diff_max', 'timeframe': 'month', 'start': 18, 'end': 0, 'group': 'month'}, + 'serie_zaehlerstand_tag_30d': {'func': 'max', 'timeframe': 'day', 'start': 30, 'end': 0, 'group': 'day'}, + 'serie_zaehlerstand_woche_30w': {'func': 'max', 'timeframe': 'week', 'start': 30, 'end': 0, 'group': 'week'}, + 'serie_zaehlerstand_monat_18m': {'func': 'max', 'timeframe': 'month', 'start': 18, 'end': 0, 'group': 'month'}, + 'serie_waermesumme_monat_24m': {'func': 'sum_max', 'timeframe': 'month', 'start': 24, 'end': 0, 'group': 'day', 'group2': 'month'}, + 'serie_kaeltesumme_monat_24m': {'func': 'sum_min_neg', 'timeframe': 'month', 'start': 24, 'end': 0, 'group': 'day', 'group2': 'month'}, + 'serie_tagesmittelwert_0d': {'func': 'max', 'timeframe': 'year', 'start': 0, 'end': 0, 'group': 'day'}, + 'serie_tagesmittelwert_stunde_0d': {'func': 'avg1', 'timeframe': 'day', 'start': 0, 'end': 0, 'group': 'hour', 'group2': 'day'}, + 'serie_tagesmittelwert_stunde_30d': {'func': 'avg1', 'timeframe': 'day', 'start': 30, 'end': 0, 'group': 'hour', 'group2': 'day'}, + 'gts': {'func': 'max', 'timeframe': 'year', 'start': None, 'end': None, 'group': 'day'}, +""" diff --git a/db_addon/item_attributes_master.py b/db_addon/item_attributes_master.py new file mode 100644 index 000000000..9d010ce70 --- /dev/null +++ b/db_addon/item_attributes_master.py @@ -0,0 +1,194 @@ +# !/usr/bin/env python +# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # 
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +# Copyright 2023 Michael Wenzel +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +# AVM for SmartHomeNG. https://github.com/smarthomeNG// +# +# This plugin is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This plugin is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this plugin. If not, see . +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # + +ITEM_ATTRIBUTS = { + 'DB_ADDON_FCTS': { + 'verbrauch_heute': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Verbrauch am heutigen Tag (Differenz zwischen aktuellem Wert und den Wert am Ende des vorherigen Tages)'}, + 'verbrauch_woche': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Verbrauch in der aktuellen Woche'}, + 'verbrauch_monat': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Verbrauch im aktuellen Monat'}, + 'verbrauch_jahr': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Verbrauch im aktuellen Jahr'}, + 'verbrauch_heute_minus1': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch gestern (heute -1 Tag) (Differenz zwischen Wert am Ende des gestrigen Tages und dem Wert 
am Ende des Tages danach)'}, + 'verbrauch_heute_minus2': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch vorgestern (heute -2 Tage)'}, + 'verbrauch_heute_minus3': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -3 Tage'}, + 'verbrauch_heute_minus4': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -4 Tage'}, + 'verbrauch_heute_minus5': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -5 Tage'}, + 'verbrauch_heute_minus6': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -6 Tage'}, + 'verbrauch_heute_minus7': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch heute -7 Tage'}, + 'verbrauch_woche_minus1': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch Vorwoche (aktuelle Woche -1)'}, + 'verbrauch_woche_minus2': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch aktuelle Woche -2 Wochen'}, + 'verbrauch_woche_minus3': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch aktuelle Woche -3 Wochen'}, + 'verbrauch_woche_minus4': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch aktuelle Woche -4 Wochen'}, + 'verbrauch_monat_minus1': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch Vormonat (aktueller Monat -1)'}, + 'verbrauch_monat_minus2': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch aktueller Monat -2 Monate'}, + 'verbrauch_monat_minus3': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 
'Verbrauch aktueller Monat -3 Monate'}, + 'verbrauch_monat_minus4': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch aktueller Monat -4 Monate'}, + 'verbrauch_monat_minus12': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch aktueller Monat -12 Monate'}, + 'verbrauch_jahr_minus1': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Verbrauch Vorjahr (aktuelles Jahr -1 Jahr)'}, + 'verbrauch_jahr_minus2': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Verbrauch aktuelles Jahr -2 Jahre'}, + 'verbrauch_rolling_12m_heute_minus1': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Tages'}, + 'verbrauch_rolling_12m_woche_minus1': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch der letzten 12 Monate ausgehend im Ende der letzten Woche'}, + 'verbrauch_rolling_12m_monat_minus1': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Monats'}, + 'verbrauch_rolling_12m_jahr_minus1': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Jahres'}, + 'verbrauch_jahreszeitraum_minus1': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch seit dem 1.1. bis zum heutigen Tag des Vorjahres'}, + 'verbrauch_jahreszeitraum_minus2': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch seit dem 1.1. 
bis zum heutigen Tag vor 2 Jahren'}, + 'verbrauch_jahreszeitraum_minus3': {'cat': 'verbrauch', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Verbrauch seit dem 1.1. bis zum heutigen Tag vor 3 Jahren'}, + 'zaehlerstand_heute_minus1': {'cat': 'zaehler', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Zählerstand / Wert am Ende des letzten Tages (heute -1 Tag)'}, + 'zaehlerstand_heute_minus2': {'cat': 'zaehler', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Zählerstand / Wert am Ende des vorletzten Tages (heute -2 Tag)'}, + 'zaehlerstand_heute_minus3': {'cat': 'zaehler', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Zählerstand / Wert am Ende des vorvorletzten Tages (heute -3 Tag)'}, + 'zaehlerstand_woche_minus1': {'cat': 'zaehler', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Zählerstand / Wert am Ende der vorvorletzten Woche (aktuelle Woche -1 Woche)'}, + 'zaehlerstand_woche_minus2': {'cat': 'zaehler', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Zählerstand / Wert am Ende der vorletzten Woche (aktuelle Woche -2 Wochen)'}, + 'zaehlerstand_woche_minus3': {'cat': 'zaehler', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Zählerstand / Wert am Ende der aktuellen Woche -3 Wochen'}, + 'zaehlerstand_monat_minus1': {'cat': 'zaehler', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Zählerstand / Wert am Ende des letzten Monates (aktueller Monat -1 Monat)'}, + 'zaehlerstand_monat_minus2': {'cat': 'zaehler', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Zählerstand / Wert am Ende des vorletzten Monates (aktueller Monat -2 Monate)'}, + 'zaehlerstand_monat_minus3': {'cat': 'zaehler', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Zählerstand / Wert am Ende des aktuellen Monats -3 Monate'}, + 'zaehlerstand_jahr_minus1': {'cat': 'zaehler', 
'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Zählerstand / Wert am Ende des letzten Jahres (aktuelles Jahr -1 Jahr)'}, + 'zaehlerstand_jahr_minus2': {'cat': 'zaehler', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Zählerstand / Wert am Ende des vorletzten Jahres (aktuelles Jahr -2 Jahre)'}, + 'zaehlerstand_jahr_minus3': {'cat': 'zaehler', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Zählerstand / Wert am Ende des aktuellen Jahres -3 Jahre'}, + 'minmax_last_24h_min': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'minimaler Wert der letzten 24h'}, + 'minmax_last_24h_max': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'maximaler Wert der letzten 24h'}, + 'minmax_last_24h_avg': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'durchschnittlicher Wert der letzten 24h'}, + 'minmax_last_7d_min': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'minimaler Wert der letzten 7 Tage'}, + 'minmax_last_7d_max': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'maximaler Wert der letzten 7 Tage'}, + 'minmax_last_7d_avg': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'durchschnittlicher Wert der letzten 7 Tage'}, + 'minmax_heute_min': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Minimalwert seit Tagesbeginn'}, + 'minmax_heute_max': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Maximalwert seit Tagesbeginn'}, + 'minmax_heute_minus1_min': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Minimalwert gestern (heute -1 Tag)'}, + 'minmax_heute_minus1_max': {'cat': 'wertehistorie', 
'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Maximalwert gestern (heute -1 Tag)'}, + 'minmax_heute_minus1_avg': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Durchschnittswert gestern (heute -1 Tag)'}, + 'minmax_heute_minus2_min': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Minimalwert vorgestern (heute -2 Tage)'}, + 'minmax_heute_minus2_max': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Maximalwert vorgestern (heute -2 Tage)'}, + 'minmax_heute_minus2_avg': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Durchschnittswert vorgestern (heute -2 Tage)'}, + 'minmax_heute_minus3_min': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Minimalwert heute vor 3 Tagen'}, + 'minmax_heute_minus3_max': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Maximalwert heute vor 3 Tagen'}, + 'minmax_heute_minus3_avg': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Durchschnittswert heute vor 3 Tagen'}, + 'minmax_woche_min': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Minimalwert seit Wochenbeginn'}, + 'minmax_woche_max': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Maximalwert seit Wochenbeginn'}, + 'minmax_woche_minus1_min': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Minimalwert Vorwoche (aktuelle Woche -1)'}, + 'minmax_woche_minus1_max': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Maximalwert Vorwoche (aktuelle Woche -1)'}, + 'minmax_woche_minus1_avg': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'weekly', 
'params': False, 'description': 'Durchschnittswert Vorwoche (aktuelle Woche -1)'}, + 'minmax_woche_minus2_min': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Minimalwert aktuelle Woche -2 Wochen'}, + 'minmax_woche_minus2_max': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Maximalwert aktuelle Woche -2 Wochen'}, + 'minmax_woche_minus2_avg': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'weekly', 'params': False, 'description': 'Durchschnittswert aktuelle Woche -2 Wochen'}, + 'minmax_monat_min': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Minimalwert seit Monatsbeginn'}, + 'minmax_monat_max': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Maximalwert seit Monatsbeginn'}, + 'minmax_monat_minus1_min': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Minimalwert Vormonat (aktueller Monat -1)'}, + 'minmax_monat_minus1_max': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Maximalwert Vormonat (aktueller Monat -1)'}, + 'minmax_monat_minus1_avg': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Durchschnittswert Vormonat (aktueller Monat -1)'}, + 'minmax_monat_minus2_min': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Minimalwert aktueller Monat -2 Monate'}, + 'minmax_monat_minus2_max': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Maximalwert aktueller Monat -2 Monate'}, + 'minmax_monat_minus2_avg': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'monthly', 'params': False, 'description': 'Durchschnittswert aktueller Monat -2 Monate'}, + 'minmax_jahr_min': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 
'onchange', 'params': False, 'description': 'Minimalwert seit Jahresbeginn'}, + 'minmax_jahr_max': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Maximalwert seit Jahresbeginn'}, + 'minmax_jahr_minus1_min': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Minimalwert Vorjahr (aktuelles Jahr -1 Jahr)'}, + 'minmax_jahr_minus1_max': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Maximalwert Vorjahr (aktuelles Jahr -1 Jahr)'}, + 'minmax_jahr_minus1_avg': {'cat': 'wertehistorie', 'item_type': 'num', 'calc': 'yearly', 'params': False, 'description': 'Durchschnittswert Vorjahr (aktuelles Jahr -1 Jahr)'}, + 'tagesmitteltemperatur_heute': {'cat': 'tagesmittel', 'item_type': 'num', 'calc': 'onchange', 'params': False, 'description': 'Tagesmitteltemperatur heute'}, + 'tagesmitteltemperatur_heute_minus1': {'cat': 'tagesmittel', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Tagesmitteltemperatur des letzten Tages (heute -1 Tag)'}, + 'tagesmitteltemperatur_heute_minus2': {'cat': 'tagesmittel', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Tagesmitteltemperatur des vorletzten Tages (heute -2 Tag)'}, + 'tagesmitteltemperatur_heute_minus3': {'cat': 'tagesmittel', 'item_type': 'num', 'calc': 'daily', 'params': False, 'description': 'Tagesmitteltemperatur des vorvorletzten Tages (heute -3 Tag)'}, + 'serie_minmax_monat_min_15m': {'cat': 'serie', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'monatlicher Minimalwert der letzten 15 Monate (gleitend)'}, + 'serie_minmax_monat_max_15m': {'cat': 'serie', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'monatlicher Maximalwert der letzten 15 Monate (gleitend)'}, + 'serie_minmax_monat_avg_15m': {'cat': 'serie', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'monatlicher 
Mittelwert der letzten 15 Monate (gleitend)'}, + 'serie_minmax_woche_min_30w': {'cat': 'serie', 'item_type': 'list', 'calc': 'weekly', 'params': False, 'description': 'wöchentlicher Minimalwert der letzten 30 Wochen (gleitend)'}, + 'serie_minmax_woche_max_30w': {'cat': 'serie', 'item_type': 'list', 'calc': 'weekly', 'params': False, 'description': 'wöchentlicher Maximalwert der letzten 30 Wochen (gleitend)'}, + 'serie_minmax_woche_avg_30w': {'cat': 'serie', 'item_type': 'list', 'calc': 'weekly', 'params': False, 'description': 'wöchentlicher Mittelwert der letzten 30 Wochen (gleitend)'}, + 'serie_minmax_tag_min_30d': {'cat': 'serie', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'täglicher Minimalwert der letzten 30 Tage (gleitend)'}, + 'serie_minmax_tag_max_30d': {'cat': 'serie', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'täglicher Maximalwert der letzten 30 Tage (gleitend)'}, + 'serie_minmax_tag_avg_30d': {'cat': 'serie', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'täglicher Mittelwert der letzten 30 Tage (gleitend)'}, + 'serie_verbrauch_tag_30d': {'cat': 'serie', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'Verbrauch pro Tag der letzten 30 Tage'}, + 'serie_verbrauch_woche_30w': {'cat': 'serie', 'item_type': 'list', 'calc': 'weekly', 'params': False, 'description': 'Verbrauch pro Woche der letzten 30 Wochen'}, + 'serie_verbrauch_monat_18m': {'cat': 'serie', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'Verbrauch pro Monat der letzten 18 Monate'}, + 'serie_zaehlerstand_tag_30d': {'cat': 'serie', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'Zählerstand am Tagesende der letzten 30 Tage'}, + 'serie_zaehlerstand_woche_30w': {'cat': 'serie', 'item_type': 'list', 'calc': 'weekly', 'params': False, 'description': 'Zählerstand am Wochenende der letzten 30 Wochen'}, + 'serie_zaehlerstand_monat_18m': {'cat': 'serie', 
'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'Zählerstand am Monatsende der letzten 18 Monate'}, + 'serie_waermesumme_monat_24m': {'cat': 'serie', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'monatliche Wärmesumme der letzten 24 Monate'}, + 'serie_kaeltesumme_monat_24m': {'cat': 'serie', 'item_type': 'list', 'calc': 'monthly', 'params': False, 'description': 'monatliche Kältesumme der letzten 24 Monate'}, + 'serie_tagesmittelwert_stunde_0d': {'cat': 'serie', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'Stundenmittelwert für den aktuellen Tag'}, + 'serie_tagesmittelwert_tag_stunde_30d': {'cat': 'serie', 'item_type': 'list', 'calc': 'daily', 'params': False, 'description': 'Stundenmittelwert pro Tag der letzten 30 Tage (bspw. zur Berechnung der Tagesmitteltemperatur basierend auf den Mittelwert der Temperatur pro Stunde'}, + 'general_oldest_value': {'cat': 'gen', 'item_type': 'num ', 'calc': False, 'params': False, 'description': 'Ausgabe des ältesten Wertes des entsprechenden "Parent-Items" mit database Attribut'}, + 'general_oldest_log': {'cat': 'gen', 'item_type': 'list', 'calc': False, 'params': False, 'description': 'Ausgabe des Timestamp des ältesten Eintrages des entsprechenden "Parent-Items" mit database Attribut'}, + 'kaeltesumme': {'cat': 'complex', 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Kältesumme für einen Zeitraum, db_addon_params: (year=mandatory, month=optional)'}, + 'waermesumme': {'cat': 'complex', 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Wärmesumme für einen Zeitraum, db_addon_params: (year=mandatory, month=optional)'}, + 'gruenlandtempsumme': {'cat': 'complex', 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Grünlandtemperatursumme für einen Zeitraum, db_addon_params: (year=mandatory)'}, + 'tagesmitteltemperatur': {'cat': 'complex', 'item_type': 
'list', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Tagesmitteltemperatur auf Basis der stündlichen Durchschnittswerte eines Tages für die angegebene Anzahl von Tagen (timeframe=day, count=integer)'}, + 'wachstumsgradtage': {'cat': 'complex', 'item_type': 'num', 'calc': 'daily', 'params': True, 'description': 'Berechnet die Wachstumsgradtage auf Basis der stündlichen Durchschnittswerte eines Tages für das laufende Jahr mit an Angabe des Temperaturschwellenwertes (threshold=Schwellentemperatur)'}, + 'db_request': {'cat': 'complex', 'item_type': 'list', 'calc': 'group', 'params': True, 'description': 'Abfrage der DB: db_addon_params: (func=mandatory, item=mandatory, timespan=mandatory, start=optional, end=optional, count=optional, group=optional, group2=optional)'}, + }, + 'DB_ADDON_INFO': { + 'db_version': {'cat': 'info', 'item_type': 'str', 'calc': False, 'params': False, 'description': 'Version der verbundenen Datenbank'}, + }, + 'DB_ADDON_ADMIN': { + 'suspend': {'cat': 'admin', 'item_type': 'bool', 'calc': False, 'params': False, 'description': 'Unterbricht die Aktivitäten des Plugin'}, + 'recalc_all': {'cat': 'admin', 'item_type': 'bool', 'calc': False, 'params': False, 'description': 'Startet einen Neuberechnungslauf aller on-demand Items'}, + 'clean_cache_values': {'cat': 'admin', 'item_type': 'bool', 'calc': False, 'params': False, 'description': 'Löscht Plugin-Cache und damit alle im Plugin zwischengespeicherten Werte'}, + }, +} + + +def get_attrs(sub_dict: dict = {}) -> list: + attributes = [] + for entry in ITEM_ATTRIBUTS: + for db_addon_fct in ITEM_ATTRIBUTS[entry]: + if sub_dict.items() <= ITEM_ATTRIBUTS[entry][db_addon_fct].items(): + attributes.append(db_addon_fct) + return attributes + + +def export_db_addon_data(): + ATTRS = {} + ATTRS['ALL_ONCHANGE_ATTRIBUTES'] = get_attrs(sub_dict={'calc': 'onchange'}) + ATTRS['ALL_DAILY_ATTRIBUTES'] = get_attrs(sub_dict={'calc': 'daily'}) + ATTRS['ALL_WEEKLY_ATTRIBUTES'] = 
get_attrs(sub_dict={'calc': 'weekly'}) + ATTRS['ALL_MONTHLY_ATTRIBUTES'] = get_attrs(sub_dict={'calc': 'monthly'}) + ATTRS['ALL_YEARLY_ATTRIBUTES'] = get_attrs(sub_dict={'calc': 'yearly'}) + ATTRS['ALL_NEED_PARAMS_ATTRIBUTES'] = get_attrs(sub_dict={'params': True}) + ATTRS['ALL_VERBRAUCH_ATTRIBUTES'] = get_attrs(sub_dict={'cat': 'verbrauch'}) + ATTRS['ALL_ZAEHLERSTAND_ATTRIBUTES'] = get_attrs(sub_dict={'cat': 'zaehler'}) + ATTRS['ALL_HISTORIE_ATTRIBUTES'] = get_attrs(sub_dict={'cat': 'wertehistorie'}) + ATTRS['ALL_TAGESMITTEL_ATTRIBUTES'] = get_attrs(sub_dict={'cat': 'tagesmittel'}) + ATTRS['ALL_SERIE_ATTRIBUTES'] = get_attrs(sub_dict={'cat': 'serie'}) + ATTRS['ALL_GEN_ATTRIBUTES'] = get_attrs(sub_dict={'cat': 'gen'}) + ATTRS['ALL_COMPLEX_ATTRIBUTES'] = get_attrs(sub_dict={'cat': 'complex'}) + + for attr, alist in ATTRS.items(): + print(f'{attr} = {alist!r}') + + +def export_for_plugin_yaml(): + for entry in ITEM_ATTRIBUTS: + print(f'{entry}:') + print('valid_list:') + for func in ITEM_ATTRIBUTS[entry]: + print(f" - '{func}'") + + for title in ['description', 'item_type', 'calc']: + print(f'valid_list_{entry}:') + for func in ITEM_ATTRIBUTS[entry]: + print(f" - '{ITEM_ATTRIBUTS[entry][func][title]}'") + print() + + +if __name__ == '__main__': + export_db_addon_data() + print() + print('--------------------------------------------------------------') + print() + export_for_plugin_yaml() diff --git a/db_addon/plugin.yaml b/db_addon/plugin.yaml index 7aeb65439..85d8d3c5f 100644 --- a/db_addon/plugin.yaml +++ b/db_addon/plugin.yaml @@ -11,7 +11,7 @@ plugin: # keywords: iot xyz # documentation: https://github.com/smarthomeNG/smarthome/wiki/CLI-Plugin # url of documentation (wiki) page support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1848494-support-thread-databaseaddon-plugin - version: 1.0.0 # Plugin version (must match the version specified in __init__.py) + version: 1.1.0 # Plugin version (must match the version specified in __init__.py) 
sh_minversion: 1.9.3.5 # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) py_minversion: 3.8 # minimum Python version to use for this plugin @@ -60,435 +60,449 @@ item_attributes: de: 'Auswertefunktion des DB-Addon Plugins' en: 'Evaluation Function of DB-Addon Plugins' valid_list: - # Verbrauch - - 'verbrauch_heute' #num onchange Verbrauch am heutigen Tag (Differenz zwischen aktuellem Wert und den Wert am Ende des vorherigen Tages) - - 'verbrauch_woche' #num onchange Verbrauch in der aktuellen Woche - - 'verbrauch_monat' #num onchange Verbrauch im aktuellen Monat - - 'verbrauch_jahr' #num onchange Verbrauch im aktuellen Jahr - - 'verbrauch_heute_minus1' #num daily Verbrauch gestern (heute -1 Tag) (Differenz zwischen Wert am Ende des gestrigen Tages und dem Wert am Ende des Tages danach) - - 'verbrauch_heute_minus2' #num daily Verbrauch vorgestern (heute -2 Tage) - - 'verbrauch_heute_minus3' #num daily Verbrauch heute -3 Tage - - 'verbrauch_heute_minus4' #num daily Verbrauch heute -4 Tage - - 'verbrauch_heute_minus5' #num daily Verbrauch heute -5 Tage - - 'verbrauch_heute_minus6' #num daily Verbrauch heute -6 Tage - - 'verbrauch_heute_minus7' #num daily Verbrauch heute -7 Tage - - 'verbrauch_woche_minus1' #num weekly Verbrauch Vorwoche (aktuelle Woche -1) - - 'verbrauch_woche_minus2' #num weekly Verbrauch aktuelle Woche -2 Wochen - - 'verbrauch_woche_minus3' #num weekly Verbrauch aktuelle Woche -3 Wochen - - 'verbrauch_woche_minus4' #num weekly Verbrauch aktuelle Woche -4 Wochen - - 'verbrauch_monat_minus1' #num monthly Verbrauch Vormonat (aktueller Monat -1) - - 'verbrauch_monat_minus2' #num monthly Verbrauch aktueller Monat -2 Monate - - 'verbrauch_monat_minus3' #num monthly Verbrauch aktueller Monat -3 Monate - - 'verbrauch_monat_minus4' #num monthly Verbrauch aktueller Monat -4 Monate - - 'verbrauch_monat_minus12' #num monthly Verbrauch aktueller Monat -12 Monate - - 
'verbrauch_jahr_minus1' #num yearly Verbrauch Vorjahr (aktuelles Jahr -1 Jahr) - - 'verbrauch_jahr_minus2' #num yearly Verbrauch aktuelles Jahr -2 Jahre - - 'verbrauch_rolling_12m_heute_minus1' #num daily Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Tages - - 'verbrauch_rolling_12m_woche_minus1' #num weekly Verbrauch der letzten 12 Monate ausgehend im Ende der letzten Woche - - 'verbrauch_rolling_12m_monat_minus1' #num monthly Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Monats - - 'verbrauch_rolling_12m_jahr_minus1' #num yearly Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Jahres - - 'verbrauch_jahreszeitraum_minus1' #num daily Verbrauch seit dem 1.1. bis zum heutigen Tag des Vorjahres - - 'verbrauch_jahreszeitraum_minus2' #num daily Verbrauch seit dem 1.1. bis zum heutigen Tag vor 2 Jahren - - 'verbrauch_jahreszeitraum_minus3' #num daily Verbrauch seit dem 1.1. bis zum heutigen Tag vor 3 Jahren - # Zaehlerstand - - 'zaehlerstand_heute_minus1' #num daily Zählerstand / Wert am Ende des letzten Tages (heute -1 Tag) - - 'zaehlerstand_woche_minus1' #num weekly Zählerstand / Wert am Ende der letzten Woche (aktuelle Woche -1 Woche) - - 'zaehlerstand_woche_minus2' #num weekly Zählerstand / Wert am Ende der vorletzten Woche (aktuelle Woche -2 Woche) - - 'zaehlerstand_woche_minus3' #num weekly Zählerstand / Wert am Ende der aktuellen Woche -3 Woche - - 'zaehlerstand_monat_minus1' #num monthly Zählerstand / Wert am Ende des letzten Monates (aktueller Monat -1 Monat) - - 'zaehlerstand_monat_minus2' #num monthly Zählerstand / Wert am Ende des vorletzten Monates (aktueller Monat -2 Monate) - - 'zaehlerstand_monat_minus3' #num monthly Zählerstand / Wert am Ende des aktuellen Monats -3 Monate - - 'zaehlerstand_jahr_minus1' #num yearly Zählerstand / Wert am Ende des letzten Jahres (aktuelles Jahr -1 Jahr) - - 'zaehlerstand_jahr_minus2' #num yearly Zählerstand / Wert am Ende des vorletzten Jahres (aktuelles Jahr -2 Jahre) - - 
'zaehlerstand_jahr_minus3' #num yearly Zählerstand / Wert am Ende des aktuellen Jahres -3 Jahre - # Wertehistorie min/max - - 'minmax_last_24h_min' #num daily minimaler Wert der letzten 24h - - 'minmax_last_24h_max' #num daily maximaler Wert der letzten 24h - - 'minmax_last_24h_avg' #num daily durchschnittlicher Wert der letzten 24h - - 'minmax_last_7d_min' #num daily minimaler Wert der letzten 7 Tage - - 'minmax_last_7d_max' #num daily maximaler Wert der letzten 7 Tage - - 'minmax_last_7d_avg' #num daily durchschnittlicher Wert der letzten 7 Tage - - 'minmax_heute_min' #num onchange Minimalwert seit Tagesbeginn - - 'minmax_heute_max' #num onchange Maximalwert seit Tagesbeginn - - 'minmax_heute_minus1_min' #num daily Minimalwert gestern (heute -1 Tag) - - 'minmax_heute_minus1_max' #num daily Maximalwert gestern (heute -1 Tag) - - 'minmax_heute_minus1_avg' #num daily Durchschnittswert gestern (heute -1 Tag) - - 'minmax_heute_minus2_min' #num daily Minimalwert vorgestern (heute -2 Tage) - - 'minmax_heute_minus2_max' #num daily Maximalwert vorgestern (heute -2 Tage) - - 'minmax_heute_minus2_avg' #num daily Durchschnittswert vorgestern (heute -2 Tage) - - 'minmax_heute_minus3_min' #num daily Minimalwert heute vor 3 Tagen - - 'minmax_heute_minus3_max' #num daily Maximalwert heute vor 3 Tagen - - 'minmax_heute_minus3_avg' #num daily Durchschnittswert heute vor 3 Tagen - - 'minmax_woche_min' #num onchange Minimalwert seit Wochenbeginn - - 'minmax_woche_max' #num onchange Maximalwert seit Wochenbeginn - - 'minmax_woche_minus1_min' #num weekly Minimalwert Vorwoche (aktuelle Woche -1) - - 'minmax_woche_minus1_max' #num weekly Maximalwert Vorwoche (aktuelle Woche -1) - - 'minmax_woche_minus1_avg' #num weekly Durchschnittswert Vorwoche (aktuelle Woche -1) - - 'minmax_woche_minus2_min' #num weekly Minimalwert aktuelle Woche -2 Wochen - - 'minmax_woche_minus2_max' #num weekly Maximalwert aktuelle Woche -2 Wochen - - 'minmax_woche_minus2_avg' #num weekly Durchschnittswert 
aktuelle Woche -2 Wochen - - 'minmax_monat_min' #num onchange Minimalwert seit Monatsbeginn - - 'minmax_monat_max' #num onchange Maximalwert seit Monatsbeginn - - 'minmax_monat_minus1_min' #num monthly Minimalwert Vormonat (aktueller Monat -1) - - 'minmax_monat_minus1_max' #num monthly Maximalwert Vormonat (aktueller Monat -1) - - 'minmax_monat_minus1_avg' #num monthly Durchschnittswert Vormonat (aktueller Monat -1) - - 'minmax_monat_minus2_min' #num monthly Minimalwert aktueller Monat -2 Monate - - 'minmax_monat_minus2_max' #num monthly Maximalwert aktueller Monat -2 Monate - - 'minmax_monat_minus2_avg' #num monthly Durchschnittswert aktueller Monat -2 Monate - - 'minmax_jahr_min' #num onchange Minimalwert seit Jahresbeginn - - 'minmax_jahr_max' #num onchange Maximalwert seit Jahresbeginn - - 'minmax_jahr_minus1_min' #num yearly Minimalwert Vorjahr (aktuelles Jahr -1 Jahr) - - 'minmax_jahr_minus1_max' #num yearly Maximalwert Vorjahr (aktuelles Jahr -1 Jahr) - - 'minmax_jahr_minus1_avg' #num yearly Durchschnittswert Vorjahr (aktuelles Jahr -1 Jahr) - # Serie - - 'serie_minmax_monat_min_15m' #list monthly monatlicher Minimalwert der letzten 15 Monate (gleitend) - - 'serie_minmax_monat_max_15m' #list monthly monatlicher Maximalwert der letzten 15 Monate (gleitend) - - 'serie_minmax_monat_avg_15m' #list monthly monatlicher Mittelwert der letzten 15 Monate (gleitend) - - 'serie_minmax_woche_min_30w' #list weekly wöchentlicher Minimalwert der letzten 30 Wochen (gleitend) - - 'serie_minmax_woche_max_30w' #list weekly wöchentlicher Maximalwert der letzten 30 Wochen (gleitend) - - 'serie_minmax_woche_avg_30w' #list weekly wöchentlicher Mittelwert der letzten 30 Wochen (gleitend) - - 'serie_minmax_tag_min_30d' #list daily täglicher Minimalwert der letzten 30 Tage (gleitend) - - 'serie_minmax_tag_max_30d' #list daily täglicher Maximalwert der letzten 30 Tage (gleitend) - - 'serie_minmax_tag_avg_30d' #list daily täglicher Mittelwert der letzten 30 Tage (gleitend) - - 
'serie_verbrauch_tag_30d' #list daily Verbrauch pro Tag der letzten 30 Tage - - 'serie_verbrauch_woche_30w' #list weekly Verbrauch pro Woche der letzten 30 Wochen - - 'serie_verbrauch_monat_18m' #list monthly Verbrauch pro Monat der letzten 18 Monate - - 'serie_zaehlerstand_tag_30d' #list daily Zählerstand am Tagesende der letzten 30 Tage - - 'serie_zaehlerstand_woche_30w' #list weekly Zählerstand am Wochenende der letzten 30 Wochen - - 'serie_zaehlerstand_monat_18m' #list monthly Zählerstand am Monatsende der letzten 18 Monate - - 'serie_waermesumme_monat_24m' #list monthly monatliche Wärmesumme der letzten 24 Monate - - 'serie_kaeltesumme_monat_24m' #list monthly monatliche Kältesumme der letzten 24 Monate - - 'serie_tagesmittelwert_stunde_0d' #list daily Stundenmittelwert für den aktuellen Tag - - 'serie_tagesmittelwert_tag_stunde_30d' #list daily Stundenmittelwert pro Tag der letzten 30 Tage (bspw. zur Berechnung der Tagesmitteltemperatur basierend auf den Mittelwert der Temperatur pro Stunde - # Allgemein - - 'general_oldest_value' #num ------ Ausgabe des ältesten Wertes des entsprechenden "Parent-Items" mit database Attribut - - 'general_oldest_log' #list ------ Ausgabe des Timestamp des ältesten Eintrages des entsprechenden "Parent-Items" mit database Attribut - # Komplex Hinweis: db_addon_params needed - - 'kaeltesumme' #num daily Berechnet die Kältesumme für einen Zeitraum, db_addon_params: (year=mandatory, month=optional) - - 'waermesumme' #num daily Berechnet die Wärmesumme für einen Zeitraum, db_addon_params: (year=mandatory, month=optional) - - 'gruenlandtempsumme' #num daily Berechnet die Grünlandtemperatursumme für einen Zeitraum, db_addon_params: (year=mandatory) - - 'tagesmitteltemperatur' #list daily Berechnet die Tagesmitteltemperatur auf basis der stündlichen Durchschnittswerte eines Tages für die angegebene Anzahl von Tagen (days=optional) - - 'db_request' #list 'group' Abfrage der DB: db_addon_params: (func=mandatory, item=mandatory, 
timespan=mandatory, start=optional, end=optional, count=optional, group=optional, group2=optional): - valid_list_description: # Beschreibung -> notwendiger Item-Type - # Verbrauch - - 'Verbrauch am heutigen Tag (Differenz zwischen aktuellem Wert und den Wert am Ende des vorherigen Tages) -> num \n - Berechnungszyklus = onchange' - - 'Verbrauch in der aktuellen Woche -> num \n - Berechnungszyklus = onchange' - - 'Verbrauch im aktuellen Monat -> num \n - Berechnungszyklus = onchange' - - 'Verbrauch im aktuellen Jahr -> num \n - Berechnungszyklus = onchange' - - 'Verbrauch gestern (heute -1 Tag) (Differenz zwischen Wert am Ende des gestrigen Tages und dem Wert am Ende des Tages danach) -> num \n - Berechnungszyklus = täglich' - - 'Verbrauch vorgestern (heute -2 Tage) -> num \n - Berechnungszyklus = täglich' - - 'Verbrauch heute -3 Tage -> num \n - Berechnungszyklus = täglich' - - 'Verbrauch heute -4 Tage -> num \n - Berechnungszyklus = täglich' - - 'Verbrauch heute -5 Tage -> num \n - Berechnungszyklus = täglich' - - 'Verbrauch heute -6 Tage -> num \n - Berechnungszyklus = täglich' - - 'Verbrauch heute -7 Tage -> num \n - Berechnungszyklus = täglich' - - 'Verbrauch Vorwoche (aktuelle Woche -1) -> num \n - Berechnungszyklus = wöchentlich' - - 'Verbrauch aktuelle Woche -2 Wochen) -> num \n - Berechnungszyklus = wöchentlich' - - 'Verbrauch aktuelle Woche -3 Wochen) -> num \n - Berechnungszyklus = wöchentlich' - - 'Verbrauch aktuelle Woche -4 Wochen) -> num \n - Berechnungszyklus = wöchentlich' - - 'Verbrauch Vormonat (aktueller Monat -1) -> num \n - Berechnungszyklus = monatlich' - - 'Verbrauch aktueller Monat -2 Monate -> num \n - Berechnungszyklus = monatlich' - - 'Verbrauch aktueller Monat -3 Monate -> num \n - Berechnungszyklus = monatlich' - - 'Verbrauch aktueller Monat -4 Monate -> num \n - Berechnungszyklus = monatlich' - - 'Verbrauch aktueller Monat -12 Monate -> num \n - Berechnungszyklus = monatlich' - - 'Verbrauch Vorjahr (aktuelles Jahr -1 Jahr) -> num \n - 
Berechnungszyklus = jährlich' - - 'Verbrauch aktuelles Jahr -2 Jahre) -> num \n - Berechnungszyklus = jährlich' - - 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Tages -> num \n - Berechnungszyklus = täglich' - - 'Verbrauch der letzten 12 Monate ausgehend im Ende der letzten Woche -> num \n - Berechnungszyklus = wöchentlich' - - 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Monats -> num \n - Berechnungszyklus = monatlich' - - 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Jahres -> num \n - Berechnungszyklus = jährlich' - - 'Verbrauch seit dem 1.1. bis zum heutigen Tag des Vorjahres -> num \n - Berechnungszyklus = täglich' - - 'Verbrauch seit dem 1.1. bis zum heutigen Tag vor 2 Jahren -> num \n - Berechnungszyklus = täglich' - - 'Verbrauch seit dem 1.1. bis zum heutigen Tag vor 3 Jahren -> num \n - Berechnungszyklus = täglich' - # Zaehlerstand - - 'Zählerstand / Wert am Ende des letzten Tages (heute -1 Tag) -> num \n - Berechnungszyklus = täglich' - - 'Zählerstand / Wert am Ende der letzten Woche (aktuelle Woche -1 Woche) -> num \n - Berechnungszyklus = wöchentlich' - - 'Zählerstand / Wert am Ende der vorletzten Woche (aktuelle Woche -2 Woche) -> num \n - Berechnungszyklus = wöchentlich' - - 'Zählerstand / Wert am Ende der aktuellen Woche -3 Woche -> num \n - Berechnungszyklus = wöchentlich' - - 'Zählerstand / Wert am Ende des letzten Monates (aktueller Monat -1 Monat)) -> num \n - Berechnungszyklus = monatlich' - - 'Zählerstand / Wert am Ende des vorletzten Monates (aktueller Monat -2 Monate)) -> num \n - Berechnungszyklus = monatlich' - - 'Zählerstand / Wert am Ende des aktuellen Monats -3 Monate) -> num \n - Berechnungszyklus = monatlich' - - 'Zählerstand / Wert am Ende des letzten Jahres (aktuelles Jahr -1 Jahr) -> num \n - Berechnungszyklus = jährlich' - - 'Zählerstand / Wert am Ende des vorletzten Jahres (aktuelles Jahr -2 Jahre) -> num \n - Berechnungszyklus = jährlich' - - 'Zählerstand / Wert am Ende des 
aktuellen Jahres -3 Jahre -> num \n - Berechnungszyklus = jährlich' - # Wertehistorie min/max - - 'minimaler Wert der letzten 24h -> num \n - Berechnungszyklus = täglich' - - 'maximaler Wert der letzten 24h -> num \n - Berechnungszyklus = täglich' - - 'durchschnittlicher Wert der letzten 24h -> num \n - Berechnungszyklus = täglich' - - 'minimaler Wert der letzten 7 Tage -> num \n - Berechnungszyklus = täglich' - - 'maximaler Wert der letzten 7 Tage -> num \n - Berechnungszyklus = täglich' - - 'durchschnittlicher Wert der letzten 7 Tage -> num \n - Berechnungszyklus = täglich' + - 'verbrauch_heute' + - 'verbrauch_woche' + - 'verbrauch_monat' + - 'verbrauch_jahr' + - 'verbrauch_heute_minus1' + - 'verbrauch_heute_minus2' + - 'verbrauch_heute_minus3' + - 'verbrauch_heute_minus4' + - 'verbrauch_heute_minus5' + - 'verbrauch_heute_minus6' + - 'verbrauch_heute_minus7' + - 'verbrauch_woche_minus1' + - 'verbrauch_woche_minus2' + - 'verbrauch_woche_minus3' + - 'verbrauch_woche_minus4' + - 'verbrauch_monat_minus1' + - 'verbrauch_monat_minus2' + - 'verbrauch_monat_minus3' + - 'verbrauch_monat_minus4' + - 'verbrauch_monat_minus12' + - 'verbrauch_jahr_minus1' + - 'verbrauch_jahr_minus2' + - 'verbrauch_rolling_12m_heute_minus1' + - 'verbrauch_rolling_12m_woche_minus1' + - 'verbrauch_rolling_12m_monat_minus1' + - 'verbrauch_rolling_12m_jahr_minus1' + - 'verbrauch_jahreszeitraum_minus1' + - 'verbrauch_jahreszeitraum_minus2' + - 'verbrauch_jahreszeitraum_minus3' + - 'zaehlerstand_heute_minus1' + - 'zaehlerstand_heute_minus2' + - 'zaehlerstand_heute_minus3' + - 'zaehlerstand_woche_minus1' + - 'zaehlerstand_woche_minus2' + - 'zaehlerstand_woche_minus3' + - 'zaehlerstand_monat_minus1' + - 'zaehlerstand_monat_minus2' + - 'zaehlerstand_monat_minus3' + - 'zaehlerstand_jahr_minus1' + - 'zaehlerstand_jahr_minus2' + - 'zaehlerstand_jahr_minus3' + - 'minmax_last_24h_min' + - 'minmax_last_24h_max' + - 'minmax_last_24h_avg' + - 'minmax_last_7d_min' + - 'minmax_last_7d_max' + - 
'minmax_last_7d_avg' + - 'minmax_heute_min' + - 'minmax_heute_max' + - 'minmax_heute_minus1_min' + - 'minmax_heute_minus1_max' + - 'minmax_heute_minus1_avg' + - 'minmax_heute_minus2_min' + - 'minmax_heute_minus2_max' + - 'minmax_heute_minus2_avg' + - 'minmax_heute_minus3_min' + - 'minmax_heute_minus3_max' + - 'minmax_heute_minus3_avg' + - 'minmax_woche_min' + - 'minmax_woche_max' + - 'minmax_woche_minus1_min' + - 'minmax_woche_minus1_max' + - 'minmax_woche_minus1_avg' + - 'minmax_woche_minus2_min' + - 'minmax_woche_minus2_max' + - 'minmax_woche_minus2_avg' + - 'minmax_monat_min' + - 'minmax_monat_max' + - 'minmax_monat_minus1_min' + - 'minmax_monat_minus1_max' + - 'minmax_monat_minus1_avg' + - 'minmax_monat_minus2_min' + - 'minmax_monat_minus2_max' + - 'minmax_monat_minus2_avg' + - 'minmax_jahr_min' + - 'minmax_jahr_max' + - 'minmax_jahr_minus1_min' + - 'minmax_jahr_minus1_max' + - 'minmax_jahr_minus1_avg' + - 'tagesmitteltemperatur_heute' + - 'tagesmitteltemperatur_heute_minus1' + - 'tagesmitteltemperatur_heute_minus2' + - 'tagesmitteltemperatur_heute_minus3' + - 'serie_minmax_monat_min_15m' + - 'serie_minmax_monat_max_15m' + - 'serie_minmax_monat_avg_15m' + - 'serie_minmax_woche_min_30w' + - 'serie_minmax_woche_max_30w' + - 'serie_minmax_woche_avg_30w' + - 'serie_minmax_tag_min_30d' + - 'serie_minmax_tag_max_30d' + - 'serie_minmax_tag_avg_30d' + - 'serie_verbrauch_tag_30d' + - 'serie_verbrauch_woche_30w' + - 'serie_verbrauch_monat_18m' + - 'serie_zaehlerstand_tag_30d' + - 'serie_zaehlerstand_woche_30w' + - 'serie_zaehlerstand_monat_18m' + - 'serie_waermesumme_monat_24m' + - 'serie_kaeltesumme_monat_24m' + - 'serie_tagesmittelwert_stunde_0d' + - 'serie_tagesmittelwert_tag_stunde_30d' + - 'general_oldest_value' + - 'general_oldest_log' + - 'kaeltesumme' + - 'waermesumme' + - 'gruenlandtempsumme' + - 'tagesmitteltemperatur' + - 'wachstumsgradtage' + - 'db_request' + valid_list_description: + - 'Verbrauch am heutigen Tag (Differenz zwischen aktuellem Wert und den 
Wert am Ende des vorherigen Tages)' + - 'Verbrauch in der aktuellen Woche' + - 'Verbrauch im aktuellen Monat' + - 'Verbrauch im aktuellen Jahr' + - 'Verbrauch gestern (heute -1 Tag) (Differenz zwischen Wert am Ende des gestrigen Tages und dem Wert am Ende des Tages danach)' + - 'Verbrauch vorgestern (heute -2 Tage)' + - 'Verbrauch heute -3 Tage' + - 'Verbrauch heute -4 Tage' + - 'Verbrauch heute -5 Tage' + - 'Verbrauch heute -6 Tage' + - 'Verbrauch heute -7 Tage' + - 'Verbrauch Vorwoche (aktuelle Woche -1)' + - 'Verbrauch aktuelle Woche -2 Wochen' + - 'Verbrauch aktuelle Woche -3 Wochen' + - 'Verbrauch aktuelle Woche -4 Wochen' + - 'Verbrauch Vormonat (aktueller Monat -1)' + - 'Verbrauch aktueller Monat -2 Monate' + - 'Verbrauch aktueller Monat -3 Monate' + - 'Verbrauch aktueller Monat -4 Monate' + - 'Verbrauch aktueller Monat -12 Monate' + - 'Verbrauch Vorjahr (aktuelles Jahr -1 Jahr)' + - 'Verbrauch aktuelles Jahr -2 Jahre' + - 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Tages' + - 'Verbrauch der letzten 12 Monate ausgehend im Ende der letzten Woche' + - 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Monats' + - 'Verbrauch der letzten 12 Monate ausgehend im Ende des letzten Jahres' + - 'Verbrauch seit dem 1.1. bis zum heutigen Tag des Vorjahres' + - 'Verbrauch seit dem 1.1. bis zum heutigen Tag vor 2 Jahren' + - 'Verbrauch seit dem 1.1. 
bis zum heutigen Tag vor 3 Jahren' + - 'Zählerstand / Wert am Ende des letzten Tages (heute -1 Tag)' + - 'Zählerstand / Wert am Ende des vorletzten Tages (heute -2 Tag)' + - 'Zählerstand / Wert am Ende des vorvorletzten Tages (heute -3 Tag)' + - 'Zählerstand / Wert am Ende der vorvorletzten Woche (aktuelle Woche -1 Woche)' + - 'Zählerstand / Wert am Ende der vorletzten Woche (aktuelle Woche -2 Wochen)' + - 'Zählerstand / Wert am Ende der aktuellen Woche -3 Wochen' + - 'Zählerstand / Wert am Ende des letzten Monates (aktueller Monat -1 Monat)' + - 'Zählerstand / Wert am Ende des vorletzten Monates (aktueller Monat -2 Monate)' + - 'Zählerstand / Wert am Ende des aktuellen Monats -3 Monate' + - 'Zählerstand / Wert am Ende des letzten Jahres (aktuelles Jahr -1 Jahr)' + - 'Zählerstand / Wert am Ende des vorletzten Jahres (aktuelles Jahr -2 Jahre)' + - 'Zählerstand / Wert am Ende des aktuellen Jahres -3 Jahre' + - 'minimaler Wert der letzten 24h' + - 'maximaler Wert der letzten 24h' + - 'durchschnittlicher Wert der letzten 24h' + - 'minimaler Wert der letzten 7 Tage' + - 'maximaler Wert der letzten 7 Tage' + - 'durchschnittlicher Wert der letzten 7 Tage' - 'Minimalwert seit Tagesbeginn' - 'Maximalwert seit Tagesbeginn' - - 'Minimalwert gestern (heute -1 Tag) -> num \n - Berechnungszyklus = täglich' - - 'Maximalwert gestern (heute -1 Tag) -> num \n - Berechnungszyklus = täglich' - - 'Durchschnittswert gestern (heute -1 Tag) -> num \n - Berechnungszyklus = täglich' - - 'Minimalwert vorgestern (heute -2 Tage) -> num \n - Berechnungszyklus = täglich' - - 'Maximalwert vorgestern (heute -2 Tage) -> num \n - Berechnungszyklus = täglich' - - 'Durchschnittswert vorgestern (heute -2 Tage) -> num \n - Berechnungszyklus = täglich' - - 'Minimalwert heute vor 3 Tagen -> num \n - Berechnungszyklus = täglich' - - 'Maximalwert heute vor 3 Tagen -> num \n - Berechnungszyklus = täglich' - - 'Durchschnittswert heute vor 3 Tagen -> num \n - Berechnungszyklus = täglich' - - 'Minimalwert seit 
Wochenbeginn -> num \n - Berechnungszyklus = onchange' - - 'Maximalwert seit Wochenbeginn -> num \n - Berechnungszyklus = onchange' - - 'Minimalwert Vorwoche (aktuelle Woche -1) -> num \n - Berechnungszyklus = wöchentlich' - - 'Maximalwert Vorwoche (aktuelle Woche -1) -> num \n - Berechnungszyklus = wöchentlich' - - 'Durchschnittswert Vorwoche (aktuelle Woche -1) -> num \n - Berechnungszyklus = wöchentlich' - - 'Minimalwert aktuelle Woche -2 Wochen -> num \n - Berechnungszyklus = wöchentlich' - - 'Maximalwert aktuelle Woche -2 Wochen -> num \n - Berechnungszyklus = wöchentlich' - - 'Durchschnittswert aktuelle Woche -2 Wochen -> num \n - Berechnungszyklus = wöchentlich' - - 'Minimalwert seit Monatsbeginn -> num \n - Berechnungszyklus = onchange' - - 'Maximalwert seit Monatsbeginn -> num \n - Berechnungszyklus = onchange' - - 'Minimalwert Vormonat (aktueller Monat -1) -> num \n - Berechnungszyklus = monatlich' - - 'Maximalwert Vormonat (aktueller Monat -1) -> num \n - Berechnungszyklus = monatlich' - - 'Durchschnittswert Vormonat (aktueller Monat -1) -> num \n - Berechnungszyklus = monatlich' - - 'Minimalwert aktueller Monat -2 Monate -> num \n - Berechnungszyklus = monatlich' - - 'Maximalwert aktueller Monat -2 Monate -> num \n - Berechnungszyklus = monatlich' - - 'Durchschnittswert aktueller Monat -2 Monate -> num \n - Berechnungszyklus = monatlich' - - 'Minimalwert seit Jahresbeginn -> num \n - Berechnungszyklus = onchange' - - 'Maximalwert seit Jahresbeginn -> num \n - Berechnungszyklus = onchange' - - 'Minimalwert Vorjahr (aktuelles Jahr -1 Jahr) -> num \n - Berechnungszyklus = jährlich' - - 'Maximalwert Vorjahr (aktuelles Jahr -1 Jahr) -> num \n - Berechnungszyklus = jährlich' - - 'Durchschnittswert Vorjahr (aktuelles Jahr -1 Jahr) -> num \n - Berechnungszyklus = jährlich' - # Serie - - 'monatlicher Minimalwert der letzten 15 Monate (gleitend) -> list \n - Berechnungszyklus = monatlich' - - 'monatlicher Maximalwert der letzten 15 Monate (gleitend) -> list \n - 
Berechnungszyklus = monatlich' - - 'monatlicher Mittelwert der letzten 15 Monate (gleitend) -> list \n - Berechnungszyklus = monatlich' - - 'wöchentlicher Minimalwert der letzten 30 Wochen (gleitend) -> list \n - Berechnungszyklus = wöchentlich' - - 'wöchentlicher Maximalwert der letzten 30 Wochen (gleitend) -> list \n - Berechnungszyklus = wöchentlich' - - 'wöchentlicher Mittelwert der letzten 30 Wochen (gleitend) -> list \n - Berechnungszyklus = wöchentlich' - - 'täglicher Minimalwert der letzten 30 Tage (gleitend) -> list \n - Berechnungszyklus = täglich' - - 'täglicher Maximalwert der letzten 30 Tage (gleitend)) -> list \n - Berechnungszyklus = täglich' - - 'täglicher Mittelwert der letzten 30 Tage (gleitend)) -> list \n - Berechnungszyklus = täglich' - - 'Verbrauch pro Tag der letzten 30 Tage) -> list \n - Berechnungszyklus = täglich' - - 'Verbrauch pro Woche der letzten 30 Wochen) -> list \n - Berechnungszyklus = wöchentlich' - - 'Verbrauch pro Monat der letzten 18 Monate) -> list \n - Berechnungszyklus = monatlich' - - 'Zählerstand am Tagesende der letzten 30 Tage) -> list \n - Berechnungszyklus = täglich' - - 'Zählerstand am Wochenende der letzten 30 Wochen) -> list \n - Berechnungszyklus = wöchentlich' - - 'Zählerstand am Monatsende der letzten 18 Monate) -> list \n - Berechnungszyklus = monatlich' - - 'monatliche Wärmesumme der letzten 24 Monate) -> list \n - Berechnungszyklus = monatlich' - - 'monatliche Kältesumme der letzten 24 Monate) -> list \n - Berechnungszyklus = monatlich' - - 'Stundenmittelwert für den aktuellen Tag) -> list \n - Berechnungszyklus = täglich' - - 'Stundenmittelwert pro Tag der letzten 30 Tage (bspw. 
zur Berechnung der Tagesmitteltemperatur basierend auf den Mittelwert der Temperatur pro Stunde) -> list \n - Berechnungszyklus = täglich' - # Allgemein - - 'Ausgabe des ältesten Wertes des entsprechenden "Parent-Items" mit database Attribut -> num' - - 'Ausgabe des Timestamp des ältesten Eintrages des entsprechenden "Parent-Items" mit database Attribut -> list' - # Komplex - - 'Berechnet die Kältesumme für einen Zeitraum, db_addon_params sind für die Definition des Zeitraums notwendig (year=optional, month=optional) -> num \n - Berechnungszyklus = täglich' - - 'Berechnet die Wärmesumme für einen Zeitraum, db_addon_params sind für die Definition des Zeitraums notwendig (year=optional, month=optional) -> num \n - Berechnungszyklus = täglich' - - 'Berechnet die Grünlandtemperatursumme für einen Zeitraum, db_addon_params sind für die Definition des Zeitraums notwendig (year=optional) siehe https://de.wikipedia.org/wiki/Gr%C3%BCnlandtemperatursumme -> num \n - Berechnungszyklus = täglich' - - 'Berechnet die Tagesmitteltemperatur auf basis der stündlichen Durchschnittswerte eines Tages für die angegebene Anzahl von Tagen (days=optional) -> num \n - Berechnungszyklus = täglich' - - 'Abfrage der DB mit db_addon_params (func=mandatory, item=mandatory, timespan=mandatory, start=optional, end=optional, count=optional, group=optional, group2=optional) -> foo \n - Berechnungszyklus = group' - + - 'Minimalwert gestern (heute -1 Tag)' + - 'Maximalwert gestern (heute -1 Tag)' + - 'Durchschnittswert gestern (heute -1 Tag)' + - 'Minimalwert vorgestern (heute -2 Tage)' + - 'Maximalwert vorgestern (heute -2 Tage)' + - 'Durchschnittswert vorgestern (heute -2 Tage)' + - 'Minimalwert heute vor 3 Tagen' + - 'Maximalwert heute vor 3 Tagen' + - 'Durchschnittswert heute vor 3 Tagen' + - 'Minimalwert seit Wochenbeginn' + - 'Maximalwert seit Wochenbeginn' + - 'Minimalwert Vorwoche (aktuelle Woche -1)' + - 'Maximalwert Vorwoche (aktuelle Woche -1)' + - 'Durchschnittswert Vorwoche (aktuelle 
Woche -1)' + - 'Minimalwert aktuelle Woche -2 Wochen' + - 'Maximalwert aktuelle Woche -2 Wochen' + - 'Durchschnittswert aktuelle Woche -2 Wochen' + - 'Minimalwert seit Monatsbeginn' + - 'Maximalwert seit Monatsbeginn' + - 'Minimalwert Vormonat (aktueller Monat -1)' + - 'Maximalwert Vormonat (aktueller Monat -1)' + - 'Durchschnittswert Vormonat (aktueller Monat -1)' + - 'Minimalwert aktueller Monat -2 Monate' + - 'Maximalwert aktueller Monat -2 Monate' + - 'Durchschnittswert aktueller Monat -2 Monate' + - 'Minimalwert seit Jahresbeginn' + - 'Maximalwert seit Jahresbeginn' + - 'Minimalwert Vorjahr (aktuelles Jahr -1 Jahr)' + - 'Maximalwert Vorjahr (aktuelles Jahr -1 Jahr)' + - 'Durchschnittswert Vorjahr (aktuelles Jahr -1 Jahr)' + - 'Tagesmitteltemperatur heute' + - 'Tagesmitteltemperatur des letzten Tages (heute -1 Tag)' + - 'Tagesmitteltemperatur des vorletzten Tages (heute -2 Tag)' + - 'Tagesmitteltemperatur des vorvorletzten Tages (heute -3 Tag)' + - 'monatlicher Minimalwert der letzten 15 Monate (gleitend)' + - 'monatlicher Maximalwert der letzten 15 Monate (gleitend)' + - 'monatlicher Mittelwert der letzten 15 Monate (gleitend)' + - 'wöchentlicher Minimalwert der letzten 30 Wochen (gleitend)' + - 'wöchentlicher Maximalwert der letzten 30 Wochen (gleitend)' + - 'wöchentlicher Mittelwert der letzten 30 Wochen (gleitend)' + - 'täglicher Minimalwert der letzten 30 Tage (gleitend)' + - 'täglicher Maximalwert der letzten 30 Tage (gleitend)' + - 'täglicher Mittelwert der letzten 30 Tage (gleitend)' + - 'Verbrauch pro Tag der letzten 30 Tage' + - 'Verbrauch pro Woche der letzten 30 Wochen' + - 'Verbrauch pro Monat der letzten 18 Monate' + - 'Zählerstand am Tagesende der letzten 30 Tage' + - 'Zählerstand am Wochenende der letzten 30 Wochen' + - 'Zählerstand am Monatsende der letzten 18 Monate' + - 'monatliche Wärmesumme der letzten 24 Monate' + - 'monatliche Kältesumme der letzten 24 Monate' + - 'Stundenmittelwert für den aktuellen Tag' + - 'Stundenmittelwert pro Tag 
der letzten 30 Tage (bspw. zur Berechnung der Tagesmitteltemperatur basierend auf den Mittelwert der Temperatur pro Stunde' + - 'Ausgabe des ältesten Wertes des entsprechenden "Parent-Items" mit database Attribut' + - 'Ausgabe des Timestamp des ältesten Eintrages des entsprechenden "Parent-Items" mit database Attribut' + - 'Berechnet die Kältesumme für einen Zeitraum, db_addon_params: (year=mandatory: int, month=optional: str)' + - 'Berechnet die Wärmesumme für einen Zeitraum, db_addon_params: (year=mandatory: int, month=optional: str, threshold=optional: int)' + - 'Berechnet die Grünlandtemperatursumme für einen Zeitraum, db_addon_params: (year=mandatory)' + - 'Berechnet die Tagesmitteltemperatur auf Basis der stündlichen Durchschnittswerte eines Tages für die angegebene Anzahl von Tagen (timeframe=day, count=integer)' + - 'Berechnet die Wachstumsgradtage auf Basis der stündlichen Durchschnittswerte eines Tages für das laufende Jahr mit an Angabe des Temperaturschwellenwertes (year=Jahr: int, method=0/1: int, threshold=Schwellentemperatur: int)' + - 'Abfrage der DB: db_addon_params: (func=mandatory, item=mandatory, timespan=mandatory, start=optional, end=optional, count=optional, group=optional, group2=optional)' valid_list_item_type: - # Verbrauch - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - # Zaehlerstand - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - # Wertehistorie min/max - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - - num - # Serie - - list - - list - - list - - list - - list - - list - - 
list - - list - - list - - list - - list - - list - - list - - list - - list - - list - - list - - list - - list - # Allgemein - - num - - list - # Komplex - - num - - num - - num - - list - - list + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'num' + - 'list' + - 'list' + - 'list' + - 'list' + - 'list' + - 'list' + - 'list' + - 'list' + - 'list' + - 'list' + - 'list' + - 'list' + - 'list' + - 'list' + - 'list' + - 'list' + - 'list' + - 'list' + - 'list' + - 'num ' + - 'list' + - 'num' + - 'num' + - 'num' + - 'list' + - 'num' + - 'list' + valid_list_calculation: + - 'onchange' + - 'onchange' + - 'onchange' + - 'onchange' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'weekly' + - 'weekly' + - 'weekly' + - 'weekly' + - 'monthly' + - 'monthly' + - 'monthly' + - 'monthly' + - 'monthly' + - 'yearly' + - 'yearly' + - 'daily' + - 'weekly' + - 'monthly' + - 'yearly' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'weekly' + - 'weekly' + - 'weekly' + - 'monthly' + - 'monthly' + - 'monthly' + - 'yearly' + - 'yearly' + - 'yearly' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'onchange' + - 'onchange' + - 'daily' + 
- 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'onchange' + - 'onchange' + - 'weekly' + - 'weekly' + - 'weekly' + - 'weekly' + - 'weekly' + - 'weekly' + - 'onchange' + - 'onchange' + - 'monthly' + - 'monthly' + - 'monthly' + - 'monthly' + - 'monthly' + - 'monthly' + - 'onchange' + - 'onchange' + - 'yearly' + - 'yearly' + - 'yearly' + - 'onchange' + - 'daily' + - 'daily' + - 'daily' + - 'monthly' + - 'monthly' + - 'monthly' + - 'weekly' + - 'weekly' + - 'weekly' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'weekly' + - 'monthly' + - 'daily' + - 'weekly' + - 'monthly' + - 'monthly' + - 'monthly' + - 'daily' + - 'daily' + - 'False' + - 'False' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'daily' + - 'group' db_addon_info: type: str @@ -496,11 +510,11 @@ item_attributes: de: 'Info-Funktion des DB-Addon Plugins' en: 'Info-Function of DB-Addon Plugins' valid_list: - - 'db_version' #str ------ Version der verbundenen Datenbank + - 'db_version' valid_list_description: - - 'Version der verbundenen Datenbank -> str' + - 'Version der verbundenen Datenbank' valid_list_item_type: - - str + - 'str' db_addon_admin: type: str @@ -508,17 +522,17 @@ item_attributes: de: 'Admin-Funktion des DB-Addon Plugins' en: 'Admin-Function of DB-Addon Plugins' valid_list: - - 'suspend' #bool ------ unterbricht die Aktivitäten des Plugin - - 'recalc_all' #bool ------ Startet einen Neuberechnungslauf aller on-demand items - - 'clean_cache_values' #bool ------ Löscht Plugin-Cache und damit alle im Plugin zwischengespeicherten Werte + - 'suspend' + - 'recalc_all' + - 'clean_cache_values' valid_list_description: - 'unterbricht die Aktivitäten des Plugin -> bool' - 'Startet einen Neuberechnungslauf aller on-demand items -> bool' - 'Löscht Plugin-Cache und damit alle im Plugin zwischengespeicherten Werte -> bool' valid_list_item_type: - - bool - - bool - - bool + - 'bool' + - 'bool' + - 'bool' db_addon_params: type: str @@ -538,6 +552,12 
@@ item_attributes: de: 'Wert der bei Abfrage bzw. Auswertung der Datenbank für diese Item ignoriert werden soll' en: 'Value which will be ignored at database query' + db_addon_database_item: + type: str + description: + de: 'Optional: Pfad des zu verwendenden Items mit Database Attribut' + en: 'Optional: Path of item with database attribut to be used' + item_structs: verbrauch_1: name: Struct für Verbrauchsauswertung bei Zählern mit stetig ansteigendem Zählerstand (Teil 1) diff --git a/db_addon/user_doc.rst b/db_addon/user_doc.rst index 65ec9bfc4..74c774c71 100644 --- a/db_addon/user_doc.rst +++ b/db_addon/user_doc.rst @@ -21,10 +21,13 @@ Diese Auswertungen werden zyklisch zum Tageswechsel, Wochenwechsel, Monatswechse der Funktion erzeugt. Um die Zugriffe auf die Datenbank zu minimieren, werden diverse Daten zwischengespeichert. -Die Items mit einem DatabaseAddon-Attribut müssen im gleichen Pfad sein, wie das Item, für das das Database Attribut -konfiguriert ist. -Bedeutet: Die Items mit dem DatabaseAddon-Attribute müssen Kinder oder Kindeskinder oder Kindeskinderkinder des Items -sein, für das das Database Attribut konfiguriert ist +Sind Items mit einem DatabaseAddon-Attribut im gleichen Pfad, wie das Item, für das das Database Attribut +konfiguriert ist, wird dieses Item automatisch ermittelt. Bedeutet: Sind die Items mit dem DatabaseAddon-Attribute Kinder +oder Kindeskinder oder Kindeskinderkinder des Items, für das das Database Attribut konfiguriert ist, wird dieses automatisch +ermittelt. + +Alternativ kann mit dem Attribute "db_addon_database_item" auch der absolute Pfad des Items angegeben werden, für das +das Database Attribut konfiguriert ist. 
Bsp: @@ -46,6 +49,12 @@ Bsp: type: num db_addon_fct: heute_minus1_max + + tagesmitteltemperatur_gestern: + type: num + db_addon_fct: heute_minus1_avg + db_addon_database_item: 'temperatur' + | Anforderungen @@ -182,3 +191,87 @@ db_addon Maintenance Das Webinterface zeigt detaillierte Informationen über die im Plugin verfügbaren Daten an. Dies dient der Maintenance bzw. Fehlersuche. Dieser Tab ist nur bei Log-Level "Debug" verfügbar. + + +Erläuterungen zu Temperatursummen +================================= + + +Grünlandtemperatursumme +----------------------- + +Beim Grünland wird die Wärmesumme nach Ernst und Loeper benutzt, um den Vegetationsbeginn und somit den Termin von Düngungsmaßnahmen zu bestimmen. +Dabei erfolgt die Aufsummierung der Tagesmitteltemperaturen über 0 °C, wobei der Januar mit 0.5 und der Februar mit 0.75 gewichtet wird. +Bei einer Wärmesumme von 200 Grad ist eine Düngung angesagt. + +siehe: https://de.wikipedia.org/wiki/Gr%C3%BCnlandtemperatursumme + +Folgende Parameter sind möglich / notwendig: + +.. code-block:: yaml + db_addon_params: "year=current" + +- year: Jahreszahl (str oder int), für das die Berechnung ausgeführt werden soll oder "current" für aktuelles Jahr (default: 'current') + + +Wachstumsgradtag +---------------- +Der Begriff Wachstumsgradtage (WGT) ist ein Überbegriff für verschiedene Größen. +Gemeinsam ist ihnen, daß zur Berechnung eine Lufttemperatur von einem Schwellenwert subtrahiert wird. +Je nach Fragestellung und Pflanzenart werden der Schwellenwert unterschiedlich gewählt und die Temperatur unterschiedlich bestimmt. +Verfügbar sind die Berechnung über 0) "einfachen Durchschnitt der Tagestemperaturen", 1) "modifizierten Durchschnitt der Tagestemperaturen" +und 2) Anzahl der Tage, deren Mitteltempertatur oberhalb der Schwellentemperatur lag. + +siehe https://de.wikipedia.org/wiki/Wachstumsgradtag + +Folgende Parameter sind möglich / notwendig: + +.. 
code-block:: yaml + db_addon_params: "year=current, method=1, threshold=10" + +- year: Jahreszahl (str oder int), für das die Berechnung ausgeführt werden soll oder "current" für aktuelles Jahr (default: 'current') +- method: 0-Berechnung über "einfachen Durchschnitt der Tagestemperaturen", 1-Berechnung über "modifizierten Durchschnitt (default: 0) +der Tagestemperaturen" 2-Anzahl der Tage, mit Mitteltempertatur oberhalb Schwellentemperatur// 10, 11 Ausgabe aus Zeitserie +- threshold: Schwellentemperatur in °C (int) (default: 10) + + +Wärmesumme +---------- + +Die Wärmesumme soll eine Aussage über den Sommer und die Pflanzenreife liefern. Es gibt keine eindeutige Definition der Größe "Wärmesumme". +Berechnet wird die Wärmesumme als Summe aller Tagesmitteltemperaturen über einem Schwellenwert ab dem 1.1. des Jahres. + +siehe https://de.wikipedia.org/wiki/W%C3%A4rmesumme + +Folgende Parameter sind möglich / notwendig: + +.. code-block:: yaml + db_addon_params: "year=current, month=1, threshold=10" + +- year: Jahreszahl (str oder int), für das die Berechnung ausgeführt werden soll oder "current" für aktuelles Jahr (default: 'current') +- month: Monat (int) des Jahres, für das die Berechnung ausgeführt werden soll (optional) (default: None) +- threshold: Schwellentemperatur in °C (int) (default: 10) + + +Kältesumme +---------- + +Die Kältesumme soll eine Aussage über die Härte des Winters liefern. +Berechnet wird die Kältesumme als Summe aller negativen Tagesmitteltemperaturen ab dem 21.9. des Jahres bis 31.3. des Folgejahres. + +siehe https://de.wikipedia.org/wiki/K%C3%A4ltesumme + +Folgende Parameter sind möglich / notwendig: + +.. 
code-block:: yaml + db_addon_params: "year=current, month=1" + +- year: Jahreszahl (str oder int), für das die Berechnung ausgeführt werden soll oder "current" für aktuelles Jahr (default: 'current') +- month: Monat (int) des Jahres, für das die Berechnung ausgeführt werden soll (optional) (default: None) + + +Tagesmitteltemperatur +--------------------- + +Die Tagesmitteltemperatur wird auf Basis der stündlichen Durchschnittswerte eines Tages (aller in der DB enthaltenen Datensätze) +für die angegebene Anzahl von Tagen (days=optional) berechnet. diff --git a/db_addon/webif/__init__.py b/db_addon/webif/__init__.py index 0397f8ab9..bd5c6c41f 100644 --- a/db_addon/webif/__init__.py +++ b/db_addon/webif/__init__.py @@ -104,7 +104,7 @@ def get_data_html(self, dataSet=None): data['plugin_suspended'] = self.plugin.suspended data['maintenance'] = True if self.plugin.log_level == 10 else False - data['queue_length'] = self.plugin.queue_backlog + data['queue_length'] = self.plugin.queue_backlog() data['active_queue_item'] = self.plugin.active_queue_item try: diff --git a/db_addon/webif/templates/index.html b/db_addon/webif/templates/index.html index 37688339b..31b2dd3a2 100644 --- a/db_addon/webif/templates/index.html +++ b/db_addon/webif/templates/index.html @@ -159,13 +159,9 @@ title: '{{ _('dict/list') }}', targets: [1], "className": "dict" }, - { - title: '{{ _('count') }}', - targets: [2], "className": "count" - }, { title: '{{ _('content') }}', - targets: [3], "className": "content" + targets: [2], "className": "content" }].concat($.fn.dataTable.defaults.columnDefs), pageLength: webif_pagelength, pageResize: resize}); @@ -267,7 +263,6 @@ {% endif %} {% set tab3title = "" ~ plugin_shortname ~ " API/Doku" %} - {% if item_count > 0 %} {% set start_tab = 1 %} @@ -278,218 +273,192 @@ {% block bodytab1 %} - -
- - - {% for item in items %} - - - - - - - - - - - - {% endfor %} - -
{{ item._path }}{{ item._type }}{{ p.get_item_config(item._path)['attribute'] }}{{ _(p.get_item_config(item)['cycle']|string) }}{% if p.get_item_config(item)['startup'] %}{{ _('Ja') }}{% else %}{{ _('Nein') }}{% endif %}.{{ item._value | float }}{{ item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') }}{{ item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') }}
-
+ + + {% for item in items %} + + + + + + + + + + + + {% endfor %} + +
{{ item._path }}{{ item._type }}{{ p.get_item_config(item._path)['db_addon_fct'] }}{{ _(p.get_item_config(item)['cycle']|string) }}{% if p.get_item_config(item)['startup'] %}{{ _('Ja') }}{% else %}{{ _('Nein') }}{% endif %}{{ item._value | float }}{{ item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') }}{{ item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') }}
{% endblock bodytab1 %} {% block bodytab2 %} -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
{{ _('00_items') }}{{ len(p.get_item_path_list('database_addon', True)) }}{{ p.get_item_path_list('database_addon', True) }}
{{ _('02_admin_items') }}{{ len(p.get_item_path_list('database_addon', 'admin')) }}{{ p.get_item_path_list('database_addon', 'admin') }}
{{ _('10_daily_items') }}{{ len(p._daily_items) }}{{ p._daily_items }}
{{ _('11_weekly_items') }}{{ len(p._weekly_items) }}{{ p._weekly_items }}
{{ _('12_monthly_items') }}{{ len(p._monthly_items) }}{{ p._monthly_items }}
{{ _('13_yearly_items') }}{{ len(p._yearly_items) }}{{ p._yearly_items }}
{{ _('14_onchange_items') }}{{ len(p._onchange_items) }}{{ p._onchange_items }}
{{ _('15_startup_items') }}{{ len(p._startup_items) }}{{ p._startup_items }}
{{ _('17_database_items') }}{{ len(p._database_items) }}{{ p._database_items }}
{{ _('16_static_items') }}{{ len(p._static_items) }}{{ p._static_items }}
{{ _('32_item_cache') }}{{ len(p.item_cache) }}{{ p.item_cache }}
{{ _('20_tageswert_dict') }}{{ len(p.current_values['day']) }}{{ p.current_values['day'] }}
{{ _('21_wochenwert_dict') }}{{ len(p.current_values['week']) }}{{ p.current_values['week'] }}
{{ _('22_monatswert_dict') }}{{ len(p.current_values['month']) }}{{ p.current_values['month'] }}
{{ _('23_jahreswert_dict') }}{{ len(p.current_values['year']) }}{{ p.current_values['year'] }}
{{ _('24_vortagsendwert_dict') }}{{ len(p.previous_values['day']) }}{{ p.previous_values['day'] }}
{{ _('25_vorwochenendwert_dict') }}{{ len(p.previous_values['week']) }}{{ p.previous_values['week'] }}
{{ _('26_vormonatsendwert_dict') }}{{ len(p.previous_values['month']) }}{{ p.previous_values['month'] }}
{{ _('27_vorjahresendwert_dict') }}{{ len(p.previous_values['year']) }}{{ p.previous_values['year'] }}
{{ _('get_item_list') }}{{ len(p.get_item_list('database_addon', True)) }}{{ p.get_item_list('database_addon', True) }}
{{ _('_plg_item_dict') }}{{ len(p._plg_item_dict) }}{{ p._plg_item_dict }}
{{ _('work_item_queue_thread') }}{{ len(p._plg_item_dict) }}{{ p.work_item_queue_thread.is_alive() }}
-
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
{{ _('00_items') }}{{ p.get_item_path_list('database_addon', True) }}
{{ _('02_admin_items') }}{{ p.get_item_path_list('database_addon', 'admin') }}
{{ _('10_daily_items') }}{{ p._daily_items() }}
{{ _('11_weekly_items') }}{{ p._weekly_items() }}
{{ _('12_monthly_items') }}{{ p._monthly_items() }}
{{ _('13_yearly_items') }}{{ p._yearly_items() }}
{{ _('14_onchange_items') }}{{ p._onchange_items() }}
{{ _('15_startup_items') }}{{ p._startup_items() }}
{{ _('17_database_items') }}{{ p._database_items() }}
{{ _('16_static_items') }}{{ p._static_items() }}
{{ _('32_item_cache') }}{{ p.item_cache }}
{{ _('20_tageswert_dict') }}{{ p.current_values['day'] }}
{{ _('21_wochenwert_dict') }}{{ p.current_values['week'] }}
{{ _('22_monatswert_dict') }}{{ p.current_values['month'] }}
{{ _('23_jahreswert_dict') }}{{ p.current_values['year'] }}
{{ _('24_vortagsendwert_dict') }}{{ p.previous_values['day'] }}
{{ _('25_vorwochenendwert_dict') }}{{ p.previous_values['week'] }}
{{ _('26_vormonatsendwert_dict') }}{{ p.previous_values['month'] }}
{{ _('27_vorjahresendwert_dict') }}{{ p.previous_values['year'] }}
{{ _('get_item_list') }}{{ p.get_item_list('database_addon', True) }}
{{ _('_plg_item_dict') }}{{ p._plg_item_dict }}
{{ _('work_item_queue_thread') }}{% if p.work_item_queue_thread != None %}{{ p.work_item_queue_thread.is_alive() }}{% endif %}
{% endblock bodytab2 %} {% block bodytab3 %} -
-

{{_('Item Attribute')}}

- {% for function, itemdefinitions_dict in p.metadata.itemdefinitions.items() %} -
-
- {{ function }}      {{('Beschreibung:')}} {{ itemdefinitions_dict['description'][language] }}       {{ ('Ergebnisdatentyp:')}} {{ itemdefinitions_dict['type'] }} -
- {% if 'valid_list' in itemdefinitions_dict %} -
- - +

{{_('Item Attribute')}}

+{% for function, itemdefinitions_dict in p.metadata.itemdefinitions.items() %} +
+
+ {{ function }}      {{('Beschreibung:')}} {{ itemdefinitions_dict['description'][language] }}       {{ ('Ergebnisdatentyp:')}} {{ itemdefinitions_dict['type'] }} +
+ {% if 'valid_list' in itemdefinitions_dict %} +
+
+ + + + + + + + + + {% for entry in itemdefinitions_dict['valid_list'] %} - - - + + + + - - - {% for entry in itemdefinitions_dict['valid_list'] %} - - - - - - {% endfor %} - -
{{_('Item Attributwert')}}{{_('Item_Type')}}{{_('Berechnung')}}{{_('Beschreibung')}}
{{_('Item Attributwert')}}{{_('Item_Type')}}{{_('Beschreibung')}}{{ entry }}{% if 'valid_list_item_type' in itemdefinitions_dict %} {{ itemdefinitions_dict.valid_list_item_type[loop.index0] }} {% else %} {{_('-')}} {% endif %}{% if 'valid_list_calculation' in itemdefinitions_dict %} {{ _(itemdefinitions_dict.valid_list_calculation[loop.index0] | string) }} {% else %} {{_('-')}} {% endif %}{% if 'valid_list_description' in itemdefinitions_dict %} {{ itemdefinitions_dict.valid_list_description[loop.index0] | string }} {% else %} {{_('-')}} {% endif %}
{{ entry }}{% if 'valid_list_item_type' in itemdefinitions_dict %} {{ itemdefinitions_dict.valid_list_item_type[loop.index0] }} {% else %} {{_('-')}} {% endif %}{% if 'valid_list_description' in itemdefinitions_dict %} {{ itemdefinitions_dict.valid_list_description[loop.index0] |string }} {% else %} {{_('-')}} {% endif %}
-
- {% endif %} -
- {% endfor %} + {% endfor %} + + +
+ {% endif %} +
+{% endfor %} -
-

{{_('Plugin Funktionen')}}

- {% for function, plugin_functions_dict in p.metadata.plugin_functions.items() %} -
-
- {{ function }}      {{('Beschreibung:')}} {{ plugin_functions_dict['description'][language] }}       {{ ('Ergebnisdatentyp:')}} {{ plugin_functions_dict['type'] }} -
+
+

{{_('Plugin Funktionen')}}

+{% for function, plugin_functions_dict in p.metadata.plugin_functions.items() %} +
+
+ {{ function }}      {{('Beschreibung:')}} {{ plugin_functions_dict['description'][language] }}       {{ ('Ergebnisdatentyp:')}} {{ plugin_functions_dict['type'] }} +
- {% if 'parameters' in plugin_functions_dict %} -
-
- - + {% if 'parameters' in plugin_functions_dict %} +
+
+
+ + + + + + + + + + {% for entry in plugin_functions_dict['parameters'] %} - - - - + + + + - - - {% for entry in plugin_functions_dict['parameters'] %} - - - - - - - {% endfor %} - -
{{_('Parameter')}}{{_('Beschreibung')}}{{_('Type')}}{{_('zulässige Werte')}}
{{_('Parameter')}}{{_('Beschreibung')}}{{_('Type')}}{{_('zulässige Werte')}}{{ entry }}{{ plugin_functions_dict['parameters'][entry]['description'][language] }}{{ plugin_functions_dict['parameters'][entry]['type'] }}{{ plugin_functions_dict['parameters'][entry]['valid_list'] }}
{{ entry }}{{ plugin_functions_dict['parameters'][entry]['description'][language] }}{{ plugin_functions_dict['parameters'][entry]['type'] }}{{ plugin_functions_dict['parameters'][entry]['valid_list'] }}
-
- {% endif %} -
- {% endfor %} + {% endfor %} + + +
+ {% endif %} +
+{% endfor %}
{% endblock bodytab3 %} diff --git a/dlms/README.md b/dlms/README.md index 032e5a049..81e18e603 100755 --- a/dlms/README.md +++ b/dlms/README.md @@ -42,8 +42,7 @@ The main module will use the ``manufacturer.yaml`` if it's existing to output mo ``` dlms: - class_name: DLMS - class_path: plugins.dlms + plugin_name: dlms serialport: /dev/dlms0 update_cycle: 900 # SUBSYSTEM==\"tty\", ATTRS{idVendor}==\"10c4\", ATTRS{idProduct}==\"ea60\", ATTRS{serial}==\"0092C9FE\", MODE=\"0666\", GROUP=\"dialout\", SYMLINK+=\"dlms0\" diff --git a/dlms/user_doc.rst b/dlms/user_doc.rst index 8636d5a45..93e077c7c 100755 --- a/dlms/user_doc.rst +++ b/dlms/user_doc.rst @@ -78,8 +78,7 @@ Beispiele für die plugin.yaml .. code:: yaml dlms: - class_name: DLMS - class_path: plugins.dlms + plugin_name: dlms serialport: /dev/dlms0 update_cycle: 900 diff --git a/dmx/README.md b/dmx/README.md index 8613467bc..ff892ff6b 100755 --- a/dmx/README.md +++ b/dmx/README.md @@ -16,8 +16,7 @@ A requirements file is provided to easy the installation. ```yaml dmx: - class_name: DMX - class_path: plugins.dmx + plugin_name: dmx serialport: /dev/usbtty... # interface = nanodmx ``` diff --git a/dmx/user_doc.rst b/dmx/user_doc.rst index 12f3d9c3f..770723718 100755 --- a/dmx/user_doc.rst +++ b/dmx/user_doc.rst @@ -30,8 +30,7 @@ plugin.yaml .. code :: yaml dmx: - class_name: DMX - class_path: plugins.dmx + plugin_name: dmx serialport: /dev/usbtty... 
# interface = nanodmx diff --git a/drexelundweiss/README.md b/drexelundweiss/README.md index df56e911d..766cb3a3a 100755 --- a/drexelundweiss/README.md +++ b/drexelundweiss/README.md @@ -54,8 +54,7 @@ The plugin detects the connected device type automatically: ```yaml DuW: - class_name: DuW - class_path: plugins.drexelundweiss + plugin_name: drexelundweiss tty: /dev/ttyUSB0 # Busmonitor: 1 # LU_ID: 130 diff --git a/easymeter/README.md b/easymeter/README.md index 0e303c01b..66e82b6ce 100755 --- a/easymeter/README.md +++ b/easymeter/README.md @@ -30,8 +30,7 @@ If you like, you can also give the serial port a descriptive name with this. ```yaml easymeter: - class_name: easymeter - class_path: plugins.easymeter + plugin_name: easymeter ``` Parameter for serial device are currently set to fix 9600/7E1. diff --git a/ecmd/README.md b/ecmd/README.md index c006f67f5..46b88fb03 100755 --- a/ecmd/README.md +++ b/ecmd/README.md @@ -1,4 +1,4 @@ -# ECMD +# ecmd ## Requirements @@ -23,8 +23,7 @@ You can specify the host ip of your ethersex device. ```yaml ecmd: - class_name: ECMD - class_path: plugins.ecmd + plugin_name: ecmd host: 10.10.10.10 # port: 2701 ``` diff --git a/elro/README.md b/elro/README.md index a0d7f0213..f8204adba 100755 --- a/elro/README.md +++ b/elro/README.md @@ -20,8 +20,7 @@ You have to just simply copy the following into your plugin.yaml file. The ip-ad ```yaml elro: - class_name: Elro - class_path: plugins.elro + plugin_name: elro ``` ### items.yaml diff --git a/enigma2/README.md b/enigma2/README.md index 4f3d8b6be..06b3be1e6 100755 --- a/enigma2/README.md +++ b/enigma2/README.md @@ -26,8 +26,7 @@ The samples use multi-instance feature of SmartHomeNG. 
```yaml vusolo4k: - class_name: Enigma2 - class_path: plugins.enigma2 + plugin_name: enigma2 host: xxx.xxx.xxx.xxx port: 81 # 81 for "vu"-boxes, it may be port 80 for a dreambox cycle: 240 @@ -37,8 +36,7 @@ vusolo4k: instance: vusolo4k vusolo2: - class_name: Enigma2 - class_path: plugins.enigma2 + plugin_name: enigma2 host: xxx.xxx.xxx.xxx port: 81 # 81 for "vu"-boxes, it may be port 80 for a dreambox cycle: 240 diff --git a/enocean/__init__.py b/enocean/__init__.py index 178047b47..1363441a9 100755 --- a/enocean/__init__.py +++ b/enocean/__init__.py @@ -166,7 +166,7 @@ class EnOcean(SmartPlugin): ALLOW_MULTIINSTANCE = False - PLUGIN_VERSION = "1.3.7" + PLUGIN_VERSION = "1.3.8" def __init__(self, sh, *args, **kwargs): @@ -576,6 +576,10 @@ def parse_item(self, item): if 'enocean_tx_eep' in item.conf: self.logger.debug(f"TX eep found in item {item._name}") + if not 'enocean_tx_id_offset' in item.conf: + self.logger.error(f"TX eep found for item {item._name} but no tx id offset specified.") + return + tx_offset = item.conf['enocean_tx_id_offset'] if not (tx_offset in self._used_tx_offsets): self._used_tx_offsets.append(tx_offset) diff --git a/enocean/eep_parser.py b/enocean/eep_parser.py index 6d959ef1a..e017f7014 100755 --- a/enocean/eep_parser.py +++ b/enocean/eep_parser.py @@ -275,7 +275,7 @@ def _parse_eep_A5_30_03(self, payload, status): if not (payload[3] == 0x0F): self.logger.error("EEP A5_30_03 not according to spec.") return results - # Data_byte2 = Temperatur 0..40C (255..0) + # Data_byte2 = Temperatur 0...40 °C (255...0) results['TEMP'] = 40 - (payload[1]/255*40) # Data_byte1 = 0x0F = Alarm, 0x1F = kein Alarm results['ALARM'] = (payload[2] == 0x0F) @@ -452,6 +452,8 @@ def _parse_eep_F6_0G_03(self, payload, status): B: status of the shutter actor (command) ''' self.logger.debug("Processing F6_0G_03: shutter actor") + self.logger.debug("payload = [{}]".format(', '.join(['0x%02X' % b for b in payload]))) + self.logger.debug("status: {}".format(status)) results 
= {} if (payload[0] == 0x70): results['POSITION'] = 0 @@ -460,9 +462,10 @@ def _parse_eep_F6_0G_03(self, payload, status): results['POSITION'] = 255 results['B'] = 0 elif (payload[0] == 0x01): - results['STATUS'] = 'Start movin up' + results['STATUS'] = 'Start moving up' results['B'] = 1 elif (payload[0] == 0x02): - results['STATUS'] = 'Start movin down' + results['STATUS'] = 'Start moving down' results['B'] = 2 + self.logger.debug('parse_eep_F6_0G_03 returns: {}'.format(results)) return results diff --git a/enocean/plugin.yaml b/enocean/plugin.yaml index 2d56f16cd..a020510a8 100755 --- a/enocean/plugin.yaml +++ b/enocean/plugin.yaml @@ -16,7 +16,7 @@ plugin: # url of the support thread support: https://knx-user-forum.de/forum/supportforen/smarthome-py/26542-featurewunsch-enocean-plugin/page13 - version: 1.3.7 # Plugin version + version: 1.3.8 # Plugin version sh_minversion: 1.3 # minimum shNG version to use this plugin #sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) multi_instance: False # plugin supports multi instance diff --git a/eta_pu/README.md b/eta_pu/README.md index ad1ce2095..efc58cca7 100755 --- a/eta_pu/README.md +++ b/eta_pu/README.md @@ -1,4 +1,4 @@ -# ETA Pellet Unit PU +# eta_pu - ETA Pellet Unit PU ## Requirements @@ -12,8 +12,7 @@ ```yaml eta_pu: - class_name: ETA_PU - class_path: plugins.eta_pu + plugin_name: eta_pu address: 192.168.179.15 port: 8080 setpath: /user/vars diff --git a/harmony/README.md b/harmony/README.md index cb0ea22a5..f525b9334 100755 --- a/harmony/README.md +++ b/harmony/README.md @@ -103,8 +103,7 @@ for a Harmony Hub activity the activity id. 
```yaml harmony: - class_name: Harmony - class_path: plugins.harmony + plugin_name: harmony harmony_ip: 192.168.178.78 #harmony_port: 5222 # [default: 5222, int] #sleekxmpp_debug: false #[default:false, bool] diff --git a/helios/README.md b/helios/README.md index 686188a7a..7a9d0fb2e 100755 --- a/helios/README.md +++ b/helios/README.md @@ -1,4 +1,4 @@ -# Helios ECx00Pro / Vallox xx SE Plugin +# helios - Helios ECx00Pro / Vallox xx SE Plugin Detailed documentation can be found on the Wiki: https://github.com/Tom-Bom-badil/helios/wiki @@ -17,8 +17,7 @@ Add the following lines to ``plugin.yaml``: ```yaml helios: - class_name: helios - class_path: plugins.helios + plugin_name: helios tty: /dev/ttyUSB0 # put your serial port here (usually /dev/ttyUSB0 or /dev/ttyAMA0) cycle: 60 # update interval in seconds; ex-default: 300 ``` diff --git a/helios_tcp/user_doc.rst b/helios_tcp/user_doc.rst index 64b262013..1750add48 100755 --- a/helios_tcp/user_doc.rst +++ b/helios_tcp/user_doc.rst @@ -12,14 +12,13 @@ Die Lüftungsanlage ... - ... ist mit dem lokalen Netzwerk verbunden - ... hat "Modbus" unter Konfiguration --> Gerät aktiviert - + Einrichtung ----------- pymodbus3 muss installiert sein. Weiterhin ist das Plugin in der plugin.yml zu aktivieren:: helios_tcp: - class_name: HeliosTCP - class_path: plugins.helios_tcp + plugin_name: helios_tcp helios_ip: < IP-Adresse des Lüftungsgeräts > update_cycle: < Abstand in Sekunden, nachdem die Werte aktualisiert werden sollen > @@ -42,58 +41,58 @@ Die folgenden Variablen stehen zur Verfügung und können abgefragt werden. Einige Werte stehen je nach Systemkonfiguration nicht in jeder Lüftungsanlage zur Verfügung. 
======================== ============================================== ======== ========== ===== ===== -Variable Beschreibung Datentyp Schreibbar Min Max +Variable Beschreibung Datentyp Schreibbar Min Max ======================== ============================================== ======== ========== ===== ===== -outside_temp Temperatur Außenluft float -exhaust_temp Temperatur Abluft float -inside_temp Temperatur Fortluft float -incoming_temp Temperatur Zuluft float -pre_heating_temp VHZ Kanalfüler (-Außenluft- T5) float -post_heating_temp NHZ Kanalfühler (-Zuluft- T6) float -post_heating_reflux_temp NHZ Rücklauffühler (-Warmwasser-Register- T7) float -error_count Anzahl der Fehler int -warning_count Anzahl der Warnungenint int -info_count Anzahl der Infos int -fan_in_rpm Zuluft rpm int -fan_out_rpm Abluft rpm int -internal_humidity Interner Luftfeuchtigkeitsfühler int -sensor1_humidity Externer Fühler KWL-FTF Feuchte 1 int -sensor2_humidity Externer Fühler KWL-FTF Feuchte 2 int -sensor3_humidity Externer Fühler KWL-FTF Feuchte 3 int -sensor4_humidity Externer Fühler KWL-FTF Feuchte 4 int -sensor5_humidity Externer Fühler KWL-FTF Feuchte 5 int -sensor6_humidity Externer Fühler KWL-FTF Feuchte 6 int -sensor7_humidity Externer Fühler KWL-FTF Feuchte 7 int -sensor8_humidity Externer Fühler KWL-FTF Feuchte 8 int -sensor1_temperature Externer Fühler KWL-FTF Temp 1 float -sensor2_temperature Externer Fühler KWL-FTF Temp 2 float -sensor3_temperature Externer Fühler KWL-FTF Temp 3 float -sensor4_temperature Externer Fühler KWL-FTF Temp 4 float -sensor5_temperature Externer Fühler KWL-FTF Temp 5 float -sensor6_temperature Externer Fühler KWL-FTF Temp 6 float -sensor7_temperature Externer Fühler KWL-FTF Temp 7 float -sensor8_temperature Externer Fühler KWL-FTF Temp 8 float -sensor1_co2 Externer Fühler KWL-CO2 1 float -sensor2_co2 Externer Fühler KWL-CO2 2 float -sensor3_co2 Externer Fühler KWL-CO2 3 float -sensor4_co2 Externer Fühler KWL-CO2 4 float -sensor5_co2 Externer Fühler 
KWL-CO2 5 float -sensor6_co2 Externer Fühler KWL-CO2 6 float -sensor7_co2 Externer Fühler KWL-CO2 7 float -sensor8_co2 Externer Fühler KWL-CO2 8 float -sensor1_voc Externer Fühler KWL-VOC 1 float -sensor2_voc Externer Fühler KWL-VOC 2 float -sensor3_voc Externer Fühler KWL-VOC 3 float -sensor4_voc Externer Fühler KWL-VOC 4 float -sensor5_voc Externer Fühler KWL-VOC 5 float -sensor6_voc Externer Fühler KWL-VOC 6 float -sensor7_voc Externer Fühler KWL-VOC 7 float -sensor8_voc Externer Fühler KWL-VOC 8 float -filter_remaining Restlaufzeit int -boost_remaining Partybetrieb Restzeit int -sleep_remaining Ruhebetrieb Restzeit int -fan_level_percent Prozentuale Lüfterstufe int -bypass_open Bypass geöffnet bool +outside_temp Temperatur Außenluft float +exhaust_temp Temperatur Abluft float +inside_temp Temperatur Fortluft float +incoming_temp Temperatur Zuluft float +pre_heating_temp VHZ Kanalfüler (-Außenluft- T5) float +post_heating_temp NHZ Kanalfühler (-Zuluft- T6) float +post_heating_reflux_temp NHZ Rücklauffühler (-Warmwasser-Register- T7) float +error_count Anzahl der Fehler int +warning_count Anzahl der Warnungenint int +info_count Anzahl der Infos int +fan_in_rpm Zuluft rpm int +fan_out_rpm Abluft rpm int +internal_humidity Interner Luftfeuchtigkeitsfühler int +sensor1_humidity Externer Fühler KWL-FTF Feuchte 1 int +sensor2_humidity Externer Fühler KWL-FTF Feuchte 2 int +sensor3_humidity Externer Fühler KWL-FTF Feuchte 3 int +sensor4_humidity Externer Fühler KWL-FTF Feuchte 4 int +sensor5_humidity Externer Fühler KWL-FTF Feuchte 5 int +sensor6_humidity Externer Fühler KWL-FTF Feuchte 6 int +sensor7_humidity Externer Fühler KWL-FTF Feuchte 7 int +sensor8_humidity Externer Fühler KWL-FTF Feuchte 8 int +sensor1_temperature Externer Fühler KWL-FTF Temp 1 float +sensor2_temperature Externer Fühler KWL-FTF Temp 2 float +sensor3_temperature Externer Fühler KWL-FTF Temp 3 float +sensor4_temperature Externer Fühler KWL-FTF Temp 4 float +sensor5_temperature Externer Fühler 
KWL-FTF Temp 5 float +sensor6_temperature Externer Fühler KWL-FTF Temp 6 float +sensor7_temperature Externer Fühler KWL-FTF Temp 7 float +sensor8_temperature Externer Fühler KWL-FTF Temp 8 float +sensor1_co2 Externer Fühler KWL-CO2 1 float +sensor2_co2 Externer Fühler KWL-CO2 2 float +sensor3_co2 Externer Fühler KWL-CO2 3 float +sensor4_co2 Externer Fühler KWL-CO2 4 float +sensor5_co2 Externer Fühler KWL-CO2 5 float +sensor6_co2 Externer Fühler KWL-CO2 6 float +sensor7_co2 Externer Fühler KWL-CO2 7 float +sensor8_co2 Externer Fühler KWL-CO2 8 float +sensor1_voc Externer Fühler KWL-VOC 1 float +sensor2_voc Externer Fühler KWL-VOC 2 float +sensor3_voc Externer Fühler KWL-VOC 3 float +sensor4_voc Externer Fühler KWL-VOC 4 float +sensor5_voc Externer Fühler KWL-VOC 5 float +sensor6_voc Externer Fühler KWL-VOC 6 float +sensor7_voc Externer Fühler KWL-VOC 7 float +sensor8_voc Externer Fühler KWL-VOC 8 float +filter_remaining Restlaufzeit int +boost_remaining Partybetrieb Restzeit int +sleep_remaining Ruhebetrieb Restzeit int +fan_level_percent Prozentuale Lüfterstufe int +bypass_open Bypass geöffnet bool humidity_control_status Feuchte-Steuerung Status int X 0 2 humidity_control_target Feuchte-Steuerung Sollwert int X 20 80 co2_control_status CO2-Steuerung Status int X 0 2 diff --git a/husky2/__init__.py b/husky2/__init__.py index 89f33023a..1809dafe9 100755 --- a/husky2/__init__.py +++ b/husky2/__init__.py @@ -55,14 +55,10 @@ async def refresh_token(self): else: session._LOGGER.debug("Refresh access token doing relogin") self.shLogger.debug("Refresh access token doing relogin") - #await self.close() - #self.shLogger.debug("Closed old session") await asyncio.sleep(5) await self.logincc(self.client_secret) self.shLogger.debug("Logged in successfully") await asyncio.sleep(5) - #await self.connect() - #self.shLogger.debug("Connected successfully") class Husky2(SmartPlugin): @@ -75,7 +71,7 @@ class properties and methods (class variables and class functions) are already 
available! """ - PLUGIN_VERSION = '2.1.0' # (must match the version specified in plugin.yaml), use '1.0.0' for your initial plugin Release + PLUGIN_VERSION = '2.1.1' ITEM_INFO = "husky_info" ITEM_CONTROL = "husky_control" @@ -318,7 +314,8 @@ def huky2Thread(self): self.asyncLoop.run_until_complete(task) except CancelledError: pass - + except AttributeError: + self.logger.debug("No other running async Tasks to cancel") except Exception as e: self.logger.warning(f"husky2_thread: finally *2 - Exception {e}") try: @@ -732,7 +729,7 @@ def getTimedeltas(self): for data in self.mowerTimestamp.get_list(): min = int((now - (data / 1000.0)) / 60.0) sec = int((now - (data / 1000.0)) % 60.0) - deltas.append(f"{min}:{sec}") + deltas.append(f"{min}:{sec:02d}") return deltas def getErrormessages(self): @@ -824,9 +821,11 @@ def index(self, reload=None): """ tmpl = self.tplenv.get_template('index.html') + items_count = len(self.plugin._items_control) + len(self.plugin._items_state) + # add values to be passed to the Jinja2 template eg: tmpl.render(p=self.plugin, interface=interface, ...) 
- return tmpl.render(p=self.plugin, device_count=self.plugin.mowerCount, items_control=self.plugin._items_control, - items_state=self.plugin._items_state) + return tmpl.render(p=self.plugin, device_count=self.plugin.mowerCount, items_count=items_count, + items_control=self.plugin._items_control, items_state=self.plugin._items_state) @cherrypy.expose def mower_park(self): diff --git a/husky2/plugin.yaml b/husky2/plugin.yaml index a3b5eb0a4..bed222c30 100755 --- a/husky2/plugin.yaml +++ b/husky2/plugin.yaml @@ -12,7 +12,7 @@ plugin: documentation: https://smarthomeng.github.io/smarthome/plugins/husky2/user_doc.html support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1764058-support-thread - version: 2.1.0 # Plugin version (must match the version specified in __init__.py) + version: 2.1.1 # Plugin version (must match the version specified in __init__.py) sh_minversion: 1.8 # minimum shNG version to use this plugin multi_instance: false # plugin supports multi instance restartable: unknown diff --git a/husky2/requirements.txt b/husky2/requirements.txt index 3628d36b5..cdea8ffc8 100755 --- a/husky2/requirements.txt +++ b/husky2/requirements.txt @@ -1 +1 @@ -aioautomower==2022.9.0 \ No newline at end of file +aioautomower==2023.3.0 \ No newline at end of file diff --git a/husky2/webif/templates/index.html b/husky2/webif/templates/index.html index 356ba5f30..79f69ddb1 100755 --- a/husky2/webif/templates/index.html +++ b/husky2/webif/templates/index.html @@ -1,11 +1,102 @@ {% extends "base_plugin.html" %} {% set logo_frame = false %} -{% set items_count = items_control|length + items_state|length %} + + + + + +{% block pluginstyles %} + +{% endblock pluginstyles %} + + +{% block pluginscripts %} + + + + +{% endblock pluginscripts %} + {% block headtable %} - - + +
+ {% if p.getToken() %} @@ -29,17 +120,7 @@ -{% block buttons %} -{% if 1==2 %} - - -{% endif %} -{% endblock %} - - {% set tabcount = 2 %} @@ -48,7 +129,7 @@ Set the tab that will be visible on start, if another tab that 1 is wanted (1 - 3) --> {% if item_count==0 %} -{% set start_tab = 1 %} + {% set start_tab = 2 %} {% endif %} @@ -57,7 +138,9 @@ --> {% set tab1title = "" ~ p.get_shortname() ~ " Geräte (" ~ device_count ~ ")" %} {% block bodytab1 %} -
+
+ +
Token
@@ -210,121 +293,112 @@ - - -
-

{{ p.translate("History") }}

- - - - - - - - - - - - - {% for stamp in p.getTimestamps() %} - - - - {% if p.getErrormessages()[loop.index0] == p.translate("No error") %} - - {% else %} - - {% endif %} - - - - {% endfor %} - - -
{{ p.translate("Timestamp") }}{{ p.translate("Timedelta min:sec") }}{{ p.translate("Activity") }}{{ p.translate("Battery") }}{{ p.translate("Coordinates") }}
{{ stamp }}{{ p.getTimedeltas()[loop.index0] }}{{ p.translate(p.getActivities()[loop.index0]) + ", " + - p.translate(p.getErrormessages()[loop.index0]) }}{{ p.translate(p.getActivities()[loop.index0]) + ", " + - p.translate(p.getErrormessages()[loop.index0]) }}{{ p.getBatterypercents()[loop.index0] }} %{{ p.getLongitudes()[loop.index0] }}, {{ p.getLatitudes()[loop.index0] }}
-
- +

{{ p.translate("History") }}

+
+ + + + + + + + + + + + + + + + {% for stamp in p.getTimestamps() %} + + + + + + {% if p.getErrormessages()[loop.index0] == p.translate("No error") %} + + {% else %} + + {% endif %} + + + + {% endfor %} + +
{{ p.translate("Timestamp") }}{{ p.translate("Timedelta min:sec") }}{{ p.translate("Activity") }}{{ p.translate("Battery") }}{{ p.translate("Coordinates") }}
{{ stamp }}{{ p.getTimedeltas()[loop.index0] }}{{ p.translate(p.getActivities()[loop.index0]) + ", " + + p.translate(p.getErrormessages()[loop.index0]) }}{{ p.translate(p.getActivities()[loop.index0]) + ", " + + p.translate(p.getErrormessages()[loop.index0]) }}{{ p.getBatterypercents()[loop.index0] }} %{{ p.getLongitudes()[loop.index0] }}, {{ p.getLatitudes()[loop.index0] }}
- {% endblock bodytab1 %} - {% set tab2title = "" ~ p.get_shortname() ~ " Items (" ~ items_count ~ ")" %} {% block bodytab2 %} -
- +

Control Items

- - - - - - - - - - {% for key in items_control %} - {% for item in items_control[key] %} - - - - - - {% endfor %} - {% endfor %} - -
{{ p.translate("Key") }}{{ p.translate("Description") }}{{ p.translate("Path") }}
{{ key }}{{ item }}{{ item._path }}
- - + + + + + + + + + + + + + {% for key in items_control %} + {% for item in items_control[key] %} + + + + + + + + {% endfor %} + {% endfor %} + +
{{ p.translate("Key") }}{{ p.translate("Description") }}{{ p.translate("Path") }}
{{ key }}{{ item.property.name }}{{ item.property.path }}
+

State Items

- - - - - - - - - - - - - {% for key in items_state %} - {% for item in items_state[key] %} - - - - - - - - - {% endfor %} - {% endfor %} - -
{{ p.translate("Key") }}{{ p.translate("Path") }}{{ p.translate("Value") }}{{ p.translate("Last Update") }}{{ p.translate("Last Change") }}
{{ key }}{{ item._path }}{{ item() }}{{ item.last_update().strftime('%d.%m.%Y %H:%M:%S') }}{{ item.last_change().strftime('%d.%m.%Y %H:%M:%S') }}
+ + + + + + + + + + + + + + {% for key in items_state %} + {% for item in items_state[key] %} + + + + + + {% if item.property.value is iterable and (item.property.value is not string and item.property.value is not mapping) %} + + {% else %} + + {% endif %} + + + + {% endfor %} + {% endfor %} + +
{{ p.translate("Key") }}{{ p.translate("Path") }}{{ p.translate("Value") }}{{ p.translate("Last Update") }}{{ p.translate("Last Change") }}
{{ key }}{{ item.property.path }}---{{ p.translate(item.property.value)}}{{ item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') }}{{ item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') }}
{% endblock bodytab2 %} - -{% block pluginscripts %} - -{% endblock pluginscripts %} - diff --git a/iaqstick/README.md b/iaqstick/README.md index c684b184b..8478ead8c 100755 --- a/iaqstick/README.md +++ b/iaqstick/README.md @@ -1,4 +1,4 @@ -# iAQ Stick +# iaqstick ## Requirements @@ -28,8 +28,7 @@ udevadm trigger ```yaml iaqstick: - class_name: iAQ_Stick - class_path: plugins.iaqstick + plugin_name: iaqstick # update_cycle: 10 ``` diff --git a/ical/__init__.py b/ical/__init__.py index 21f41dc5b..6dd1b11ac 100755 --- a/ical/__init__.py +++ b/ical/__init__.py @@ -33,7 +33,7 @@ class iCal(SmartPlugin): - PLUGIN_VERSION = "1.6.1" + PLUGIN_VERSION = "1.6.2" ALLOW_MULTIINSTANCE = False DAYS = ("MO", "TU", "WE", "TH", "FR", "SA", "SU") FREQ = ("YEARLY", "MONTHLY", "WEEKLY", "DAILY", "HOURLY", "MINUTELY", "SECONDLY") @@ -290,7 +290,7 @@ def _parse_ical(self, ical, ics, prio): self.logger.warning("problem parsing {0} no DTSTART for UID: {1}".format(ics, event['UID'])) continue if 'DTEND' not in event: - self.logger.warning("Warning in parsing {0} no DTEND for UID: {1}. Setting DTEND from DTSTART".format(ics, event['UID'])) + self.logger.info("Warning in parsing {0} no DTEND for UID: {1}. Setting DTEND from DTSTART".format(ics, event['UID'])) # Set end to start time: event['DTEND'] = event['DTSTART'] continue diff --git a/ical/plugin.yaml b/ical/plugin.yaml index 44ac58dfe..a5cd7066f 100755 --- a/ical/plugin.yaml +++ b/ical/plugin.yaml @@ -13,14 +13,14 @@ plugin: You can use offline files and online feeds. ' - maintainer: cmalo (mknx) - tester: onkelandy + maintainer: onkelandy, cmalo (mknx) + tester: '?' 
state: ready keywords: ical ics calendar # documentation: https://github.com/smarthomeNG/smarthome/wiki/CLI-Plugin # url of documentation (wiki) page support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1352089-support-thread-zum-ical-plugin - version: 1.6.1 # Plugin version + version: 1.6.2 # Plugin version sh_minversion: 1.9.0 # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) multi_instance: false # plugin supports multi instance diff --git a/indego/README.md b/indego/README.md index 1206818bd..ec8a390d5 100755 --- a/indego/README.md +++ b/indego/README.md @@ -1,4 +1,4 @@ -# Indego Plugin +# indego Plugin #### Version 1.x.y @@ -60,8 +60,7 @@ Please refer to the documentation generated from plugin.yaml metadata. ```yaml MyIndego: - class_name: Indego - class_path: plugins.indego + plugin_name: indego user: 'NUTZERNAME' # -> you need to use the name that you used on your Indego App password: 'PASSWORT' # -> you need to use the password that you used on your Indego App cycle: 30 # frequency of when how often the status is updated, is working without a problem with 30seconds, not sure when the server will start to be annoyed by your requests diff --git a/indego4shng/__init__.py b/indego4shng/__init__.py index ec7513545..9888b1ebf 100755 --- a/indego4shng/__init__.py +++ b/indego4shng/__init__.py @@ -64,7 +64,7 @@ class Indego4shNG(SmartPlugin): Main class of the Indego Plugin. 
Does all plugin specific stuff and provides the update functions for the items """ - PLUGIN_VERSION = '3.0.2' + PLUGIN_VERSION = '4.0.0' def __init__(self, sh, *args, **kwargs): """ @@ -141,6 +141,10 @@ def __init__(self, sh, *args, **kwargs): self.providers = {} self.mowertype = {} + self._refresh_token = '' + self._bearer = '' + self.token_expires = '' + # The following part of the __init__ method is only needed, if a webinterface is being implemented: # if plugin should start even without web interface @@ -162,15 +166,16 @@ def run(self): self.password = self.credentials.split(":")[1] # taken from Init of the plugin if (self.user != '' and self.password != ''): - self._auth() - self.logged_in = self._check_auth() + # self._auth() deprecated + self.logged_in = self._login2Bosch() # start the refresh timers self.scheduler_add('operating_data',self._get_operating_data,cycle = 300) self.scheduler_add('get_state', self._get_state, cycle = self.cycle) self.scheduler_add('alert', self.alert, cycle=300) self.scheduler_add('get_all_calendars', self._get_all_calendars, cycle=300) - self.scheduler_add('check_login_state', self._check_login_state, cycle=130) + #self.scheduler_add('check_login_state', self._check_login_state, cycle=130) + self.scheduler_add('refresh_token', self._getrefreshToken, cycle=self.token_expires-100) self.scheduler_add('device_data', self._device_data, cycle=120) self.scheduler_add('get_weather', self._get_weather, cycle=600) self.scheduler_add('get_next_time', self._get_next_time, cycle=300) @@ -188,7 +193,8 @@ def stop(self): self.scheduler_remove('get_state') self.scheduler_remove('alert') self.scheduler_remove('get_all_calendars') - self.scheduler_remove('check_login_state') + #self.scheduler_remove('check_login_state') + self.scheduler_remove('refresh_token') self.scheduler_remove('device_date') self.scheduler_remove('get_weather') self.scheduler_remove('get_next_time') @@ -830,9 +836,12 @@ def _delete_url(self, url, contextid=None, timeout=40, 
auth=None,nowait = True): myCouner += 1 time.sleep(2) - headers = { - 'x-im-context-id' : self.context_id - } + headers = {'accept-encoding' : 'gzip', + 'authorization' : 'Bearer '+ self._bearer, + 'connection' : 'Keep-Alive', + 'host' : 'api.indego-cloud.iot.bosch-si.com', + 'user-agent' : 'Indego-Connect_4.0.0.12253' + } response = False try: response = requests.delete(url, headers=headers, auth=auth) @@ -861,16 +870,19 @@ def _get_url(self, url, contextid=None, timeout=40, auth=None): myCouner += 1 time.sleep(2) - headers = { - 'x-im-context-id' : self.context_id - } + headers = {'accept-encoding' : 'gzip', + 'authorization' : 'Bearer '+ self._bearer, + 'connection' : 'Keep-Alive', + 'host' : 'api.indego-cloud.iot.bosch-si.com', + 'user-agent' : 'Indego-Connect_4.0.0.12253' + } response = False try: if auth == None: response = requests.get(url, headers=headers) else: response = requests.get(url, headers=headers, auth=auth) - self._log_communication('get ', url, response.status_code) + self._log_communication('GET ', url, response.status_code) except Exception as e: self.logger.warning("Problem fetching {}: {}".format(url, e)) return False @@ -902,12 +914,12 @@ def _post_url(self, url, contextid=None, body=None, timeout=2, auth = "", nowait myCouner += 1 time.sleep(2) - if (contextid != None and contextid != ""): - headers = { - 'x-im-context-id' : self.context_id - } - else: - headers = "" + headers = {'accept-encoding' : 'gzip', + 'authorization' : 'Bearer '+ self._bearer, + 'connection' : 'Keep-Alive', + 'host' : 'api.indego-cloud.iot.bosch-si.com', + 'user-agent' : 'Indego-Connect_4.0.0.12253' + } response = False try: @@ -937,9 +949,12 @@ def _put_url(self, url, contextid=None, body=None, timeout=2): myCouner += 1 time.sleep(2) - headers = { - 'x-im-context-id' : contextid - } + headers = {'accept-encoding' : 'gzip', + 'authorization' : 'Bearer '+ self._bearer, + 'connection' : 'Keep-Alive', + 'host' : 'api.indego-cloud.iot.bosch-si.com', + 'user-agent' : 
'Indego-Connect_4.0.0.12253' + } response = False try: @@ -1072,6 +1087,449 @@ def _auth(self): self.logger.info("Serial received : {}".format(self.alm_sn)) self._log_communication('Auth ', 'Expiration time {}'.format(expiration_timestamp), str(auth_response)) + def _getrefreshToken(self): + myUrl = 'https://prodindego.b2clogin.com/prodindego.onmicrosoft.com/b2c_1a_signup_signin/oauth2/v2.0/token' + mySession = requests.session() + mySession.headers['accept'] = 'application/json' + mySession.headers['accept-encoding'] = 'gzip' + mySession.headers['connection'] = 'Keep-Alive' + mySession.headers['content-type'] = 'application/x-www-form-urlencoded' + mySession.headers['host'] = 'prodindego.b2clogin.com' + mySession.headers['user-agent'] = 'Dalvik/2.1.0 (Linux; U; Android 11; sdk_gphone_x86_arm Build/RSR1.201013.001)' + params = { + "grant_type":"refresh_token", + "refresh_token": self._refresh_token + } + + response = requests.post(myUrl, data=params) + self._log_communication('POST ', myUrl, response.status_code) + + myJson = json.loads (response.content.decode()) + self._refresh_token = myJson['refresh_token'] + self._bearer = myJson['access_token'] + self.token_expires = myJson['expires_in'] + self.last_login_timestamp = datetime.timestamp(datetime.now()) + self.expiration_timestamp = self.last_login_timestamp + self.token_expires + + def _login2Bosch(self): + # Standardvalues + self.login_pending = True + code_challenge = 'iGz3HXMCebCh65NomBE5BbfSTBWE40xLew2JeSrDrF4' + code_verifier = '9aOBN3dvc634eBaj7F8iUnppHeqgUTwG7_3sxYMfpcjlIt7Uuv2n2tQlMLhsd0geWMNZPoryk_bGPmeZKjzbwA' + nonce = 'LtRKgCy_l1abdbKPuf5vhA' + myClientID = '65bb8c9d-1070-4fb4-aa95-853618acc876' # that the Client-ID for the Bosch-App + + myPerfPayload ={ + "navigation": { + "type": 0, + "redirectCount": 0 + }, + "timing": { + "connectStart": 1678187315976, + "navigationStart": 1678187315876, + "loadEventEnd": 1678187317001, + "domLoading": 1678187316710, + "secureConnectionStart": 1678187315994, + 
"fetchStart": 1678187315958, + "domContentLoadedEventStart": 1678187316973, + "responseStart": 1678187316262, + "responseEnd": 1678187316322, + "domInteractive": 1678187316973, + "domainLookupEnd": 1678187315958, + "redirectStart": 0, + "requestStart": 1678187316010, + "unloadEventEnd": 0, + "unloadEventStart": 0, + "domComplete": 1678187317001, + "domainLookupStart": 1678187315958, + "loadEventStart": 1678187317001, + "domContentLoadedEventEnd": 1678187316977, + "redirectEnd": 0, + "connectEnd": 1678187316002 + }, + "entries": [ + { + "name": "https://prodindego.b2clogin.com/prodindego.onmicrosoft.com/b2c_1a_signup_signin/oauth2/v2.0/authorize?redirect_uri=com.bosch.indegoconnect%3A%2F%2Flogin&client_id=65bb8c9d-1070-4fb4-aa95-853618acc876&response_type=code&state=j1A8L2zQMbolEja6yqbj4w&nonce=LtRKgCy_l1abdbKPuf5vhA&scope=openid%20profile%20email%20offline_access%20https%3A%2F%2Fprodindego.onmicrosoft.com%2Findego-mobile-api%2FIndego.Mower.User&code_challenge={}&code_challenge_method=S256".format(code_challenge), + "entryType": "navigation", + "startTime": 0, + "duration": 1125.3999999999849, + "initiatorType": "navigation", + "nextHopProtocol": "http/1.1", + "workerStart": 0, + "redirectStart": 0, + "redirectEnd": 0, + "fetchStart": 82.29999999997517, + "domainLookupStart": 82.29999999997517, + "domainLookupEnd": 82.29999999997517, + "connectStart": 99.99999999999432, + "connectEnd": 126.29999999998631, + "secureConnectionStart": 117.4999999999784, + "requestStart": 133.7999999999795, + "responseStart": 385.5999999999824, + "responseEnd": 445.699999999988, + "transferSize": 66955, + "encodedBodySize": 64581, + "decodedBodySize": 155950, + "serverTiming": [], + "workerTiming": [], + "unloadEventStart": 0, + "unloadEventEnd": 0, + "domInteractive": 1097.29999999999, + "domContentLoadedEventStart": 1097.29999999999, + "domContentLoadedEventEnd": 1100.999999999999, + "domComplete": 1125.2999999999815, + "loadEventStart": 1125.3999999999849, + "loadEventEnd": 
1125.3999999999849, + "type": "navigate", + "redirectCount": 0 + }, + { + "name": "https://swsasharedprodb2c.blob.core.windows.net/b2c-templates/bosch/unified.html", + "entryType": "resource", + "startTime": 1038.0999999999858, + "duration": 21.600000000006503, + "initiatorType": "xmlhttprequest", + "nextHopProtocol": "", + "workerStart": 0, + "redirectStart": 0, + "redirectEnd": 0, + "fetchStart": 1038.0999999999858, + "domainLookupStart": 0, + "domainLookupEnd": 0, + "connectStart": 0, + "connectEnd": 0, + "secureConnectionStart": 0, + "requestStart": 0, + "responseStart": 0, + "responseEnd": 1059.6999999999923, + "transferSize": 0, + "encodedBodySize": 0, + "decodedBodySize": 0, + "serverTiming": [], + "workerTiming": [] + }, + { + "name": "https://swsasharedprodb2c.blob.core.windows.net/b2c-templates/bosch/bosch-header.png", + "entryType": "resource", + "startTime": 1312.7999999999815, + "duration": 7.900000000006457, + "initiatorType": "css", + "nextHopProtocol": "", + "workerStart": 0, + "redirectStart": 0, + "redirectEnd": 0, + "fetchStart": 1312.7999999999815, + "domainLookupStart": 0, + "domainLookupEnd": 0, + "connectStart": 0, + "connectEnd": 0, + "secureConnectionStart": 0, + "requestStart": 0, + "responseStart": 0, + "responseEnd": 1320.699999999988, + "transferSize": 0, + "encodedBodySize": 0, + "decodedBodySize": 0, + "serverTiming": [], + "workerTiming": [] + } + ], + "connection": { + "onchange": None, + "effectiveType": "4g", + "rtt": 150, + "downlink": 1.6, + "saveData": False, + "downlinkMax": None, + "type": "unknown", + "ontypechange": None + } + } + + myReqPayload = { + "pageViewId":'', + "pageId":"CombinedSigninAndSignup", + "trace":[ + { + "ac":"T005", + "acST":1678187316, + "acD":7 + }, + { + "ac":"T021 - URL:https://swsasharedprodb2c.blob.core.windows.net/b2c-templates/bosch/unified.html", + "acST":1678187316, + "acD":119 + }, + { + "ac":"T019", + "acST":1678187317, + "acD":44 + }, + { + "ac":"T004", + "acST":1678187317, + "acD":19 + }, + 
{ + "ac":"T003", + "acST":1678187317, + "acD":5 + }, + { + "ac":"T035", + "acST":1678187317, + "acD":0 + }, + { + "ac":"T030Online", + "acST":1678187317, + "acD":0 + }, + { + "ac":"T002", + "acST":1678187328, + "acD":0 + } + ] + } + # Create a session + mySession = requests.session() + + # Collect some Cookies + + url = 'https://prodindego.b2clogin.com/prodindego.onmicrosoft.com/b2c_1a_signup_signin/oauth2/v2.0/authorize?redirect_uri=com.bosch.indegoconnect%3A%2F%2Flogin&client_id={}&response_type=code&state=j1A8L2zQMbolEja6yqbj4w&nonce=LtRKgCy_l1abdbKPuf5vhA&scope=openid%20profile%20email%20offline_access%20https%3A%2F%2Fprodindego.onmicrosoft.com%2Findego-mobile-api%2FIndego.Mower.User&code_challenge={}&code_challenge_method=S256'.format(myClientID,code_challenge) + + myHeader = {'accept' : 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9', + 'accept-encoding' : 'gzip, deflate, br', + 'accept-language' : 'en-US', + 'connection' : 'keep-alive', + 'host' : 'prodindego.b2clogin.com', + 'user-agent' : 'Mozilla/5.0 (Linux; Android 11; sdk_gphone_x86_arm) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.106 Mobile Safari/537.36' + } + mySession.headers = myHeader + + response = mySession.get(url, allow_redirects=True ) + self._log_communication('GET ', url, response.status_code) + + myText= response.content.decode() + myText1 = myText[myText.find('"csrf"')+8:myText.find('"csrf"')+300] + myCsrf = (myText1[:myText1.find(',')-1]) + + myText1 = myText[myText.find('nonce'):myText.find('nonce')+40] + myNonce = myText1.split('"')[1] + + myText1 = myText[myText.find('pageViewId'):myText.find('pageViewId')+60] + myPageViewID = myText1.split('"')[2] + + myReqPayload['pageViewId']=myPageViewID + + mySession.headers['x-csrf-token'] = myCsrf + mySession.headers['referer'] = url + mySession.headers['origin'] = 'https://prodindego.b2clogin.com' + mySession.headers['host'] = 'prodindego.b2clogin.com' + 
mySession.headers['x-requested-with'] = 'XMLHttpRequest' + mySession.headers['content-length'] = str(len(json.dumps(myPerfPayload))) + mySession.headers['content-type'] = 'application/json; charset=UTF-8' + mySession.headers['accept-language'] = 'en-US,en;q=0.9' + + + myState = mySession.cookies['x-ms-cpim-trans'] + myCookie = json.loads(base64.b64decode(myState).decode()) + myNewState = '{"TID":"'+myCookie['C_ID']+'"}' + myNewState = base64.b64encode(myNewState.encode()).decode()[:-2] + #'{"TID":"8912c0e6-defb-4d58-858b-27d1cfbbe8f5"}' + #eyJUSUQiOiI4OTEyYzBlNi1kZWZiLTRkNTgtODU4Yi0yN2QxY2ZiYmU4ZjUifQ + + + myUrl = 'https://prodindego.b2clogin.com/prodindego.onmicrosoft.com/B2C_1A_signup_signin/client/perftrace?tx=StateProperties={}&p=B2C_1A_signup_signin'.format(myNewState) + response=mySession.post(myUrl,data=json.dumps(myPerfPayload)) + self._log_communication('GET ', myUrl, response.status_code) + + + myUrl = 'https://prodindego.b2clogin.com/prodindego.onmicrosoft.com/B2C_1A_signup_signin/api/CombinedSigninAndSignup/unified' + mySession.headers['accept'] = 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9' + mySession.headers['accept-encoding'] = 'gzip, deflate, br' + mySession.headers['upgrade-insecure-requests'] = '1' + mySession.headers['sec-fetch-mode'] = 'navigate' + mySession.headers['sec-fetch-dest'] = 'document' + mySession.headers['sec-fetch-user'] = '?1' + mySession.headers['sec-fetch-site'] = 'same-origin' + + del mySession.headers['content-length'] + del mySession.headers['content-type'] + del mySession.headers['x-requested-with'] + del mySession.headers['x-csrf-token'] + del mySession.headers['origin'] + + myParams = { + 'claimsexchange': 'BoschIDExchange', + 'csrf_token': myCsrf, + 'tx': 'StateProperties=' + myNewState, + 'p': 'B2C_1A_signup_signin', + 'diags': myReqPayload + } + # Get the redirect-URI + response = mySession.get(myUrl,allow_redirects=False,params=myParams) 
+ self._log_communication('GET ', myUrl, response.status_code) + try: + if (response.status_code == 302): + myText = response.content.decode() + myText1 = myText[myText.find('href') + 6:] + myNewUrl = myText1.split('"')[0].replace('&','&') + else: + pass + except: + pass + + mySession.headers['sec-fetch-site'] = 'cross-site' + mySession.headers['host'] = 'identity.bosch.com' + + # Get the CIAMIDS + response = mySession.get(myNewUrl,allow_redirects=True) + self._log_communication('GET ', myNewUrl, response.status_code) + try: + if (response.history[0].status_code != 302): + pass + else: + myNewUrl = response.history[0].headers['location'] + except: + pass + + # Signin to Session + response = mySession.get(myNewUrl,allow_redirects=False) + self._log_communication('GET ', myNewUrl, response.status_code) + + # Authorize -IDS + myNewUrl = response.headers['location'] + mySession.headers['host'] = 'identity-myprofile.bosch.com' + mySession.headers['upgrade-insecure-requests']='1' + cookie_obj = 
requests.cookies.create_cookie(domain="identity-myprofile.bosch.com",name=".AspNetCore.Identity.Application",value="CfDJ8BbTLtgL3GFMvpWDXN913TQqlMWfpnzYNGGcX0qV3_e1mcxyuYndGzcNXVwoAHCyvY3Ad_1bkYnLsg-J56IdLUNQVMguFnS_KWkPbzib4u6SQtdCZfbiIPV_ZUh4xK-Pd-LgJ61Fi4ljxbb4CewKJRAaDyOhS7KPUu68EVdzte3mEYGm2z8PeSvViW6cGgQeIIOcJ1G3f7XG_s2synfm4o6MDA49a1WnkBIk1kXBodq-vKYXZNMLHOtNGVNE2aZ_k5b9E4mGQVeuncw6SupEku9dCXgO0tRRFK0qUX-41JVrgQdz5v4c_4NB--i1U1b7LUmoZrTtkv0a5KcGPTGz9cZqV5D_Ki4p5uoQxZCmDBPbyecSe6xF3m4yGpEC6hTfrOEJR4LdX6mnppjnXMSc1Y9Pr0Lui3FGeBGuK8GyT4QXJ-pnFrLyF8dh6g2ovkeRvI8MlS5DLSLy_d0s2nOgUxVQPxDsVCxtIMJhE14tSUnC9oRDB_6YUxOqMTEJ_dFacHt-s4iLD2ClBLtA6MsDQcF5pYe4ZOt9zLMuLcoO1NqD3Ca0r00Y0qdkGFGvckp5Xqf7QndkcZxKMPE3GtfH8o6uMsFd7hs1xstxBlT2pgrp0fjjk5R8ugOzJDv-BXarCbjXTzLJtAMVYO4dzorJ7xnXAZDK4IczfXIgxZliwOnTCBvwGIx5CHZfnkYlfhS1PbOE0bwR-sqvJXCS8Jmh6BjmSPHcoKxWxJbLa_wok5HsYmOJgQhVE49WgwuBV88sFvoxpnK_pp1IRR0jFfnV4stT905lkd8hNj5D8o3aZ35sHZDuNPYEXFNUPDORoFnfHkNAP33r126a00n-fLLjaBhFa7W5PnPDaD-M-luVP7nIL-c2tlVon_XRZRC5KMzO4FuOqCeCFwsh3jTtpJk5_iUS4EpHvHT5ldZtRVShC2uzZQ63N_LWl5KZwVlWXPCaLECCZwsGfaAJz0HKDlC-vgXuWL7odJKInmIsi4BJeM9xe280pPDwD6FNUhSOAM2GZgCAW2jilScn5hA2pS1HsLD9yLV0-80Rk9UR9RmRt7USsIOf_7qFMnijAV3MZq9wNKt7ZTBDCI40dxQ1WCYSUV0") + mySession.cookies.set_cookie(cookie_obj) + response = mySession.get(myNewUrl,allow_redirects=False) + self._log_communication('GET ', myNewUrl, response.status_code) + + # Get the login page with redirect URI + returnUrl = myNewUrl + myNewUrl='https://identity-myprofile.bosch.com/ids/login?ReturnUrl='+returnUrl + response=mySession.get(myNewUrl,allow_redirects=True) + self._log_communication('GET', myNewUrl, response.status_code) + myText = response.content.decode() + # find all the needed values + RequestVerificationToken = myText[myText.find('__RequestVerificationToken'):myText.find('__RequestVerificationToken')+300].split('"')[4] + postData = { + 'meta-information' : '', + 'uEmail' : self.user, + 'uPassword' : self.password, + 'ReturnUrl' : 
returnUrl[36:58]+'/callback'+returnUrl[58:], + '__RequestVerificationToken' : RequestVerificationToken + } + mySession.headers['content-type'] = 'application/x-www-form-urlencoded' + mySession.headers['sec-fetch-sites'] = 'same-origin' + mySession.headers['origin'] = '' + response=mySession.post(myNewUrl,data=postData,allow_redirects=True) + self._log_communication('POST ', myNewUrl, response.status_code) + + ######################################### + mySession.headers['pragma'] = 'no-cache' + mySession.headers['request-context'] = response.history[0].headers['request-context'] + mySession.headers['host'] = 'identity.bosch.com' + myNewUrl = response.history[1].headers['location'] + + # Collect next Cookie + response = mySession.get(myNewUrl,allow_redirects=False) + self._log_communication('GET ', myNewUrl, response.status_code) + + #Get Location for autorization + myNewUrl = 'https://identity.bosch.com/callback' + response=mySession.get(myNewUrl,allow_redirects=False) + self._log_communication('GET', myNewUrl, response.status_code) + myNewUrl = response.headers['location'] + + #Get Authorize-Informations + response = mySession.get(myNewUrl,allow_redirects=False) + self._log_communication('GET ', myNewUrl, response.status_code) + + # Get the post-Fields + myText= response.content.decode() + myCode = myText[myText.find('"code"')+14:myText.find('"code"')+300].split('"')[0] + mySessionState = myText[myText.find('"session_state"')+23:myText.find('"session_state"')+300].split('"')[0] + myState = myText[myText.find('"state"')+15:myText.find('"state"')+300].split('"')[0] + + request_body = {"code" : myCode, "state" : myState, "session_state=" : mySessionState } + + mySession.headers['host'] = 'prodindego.b2clogin.com' + mySession.headers['origin'] = 'https://identity.bosch.com' + mySession.headers['content-type'] = 'application/x-www-form-urlencoded' + mySession.headers['cache-control'] = 'max-age=0' + + del mySession.headers['pragma'] + del 
mySession.headers['request-context'] + + myNewUrl='https://prodindego.b2clogin.com/prodindego.onmicrosoft.com/oauth2/authresp' + response = mySession.post(myNewUrl,data=request_body,allow_redirects=False) + self._log_communication('POST ', myNewUrl, response.status_code) + myNewUrl = response.headers['location'] + + myFinalCode = myNewUrl.split("code")[1].split("=")[1] + + + # Get the new Login-Page + url = 'https://prodindego.b2clogin.com/prodindego.onmicrosoft.com/b2c_1a_signup_signin/oauth2/v2.0/authorize?redirect_uri=com.bosch.indegoconnect%3A%2F%2Flogin&client_id={}&response_type=code&state=j1A8L2zQMbolEja6yqbj4w&nonce=LtRKgCy_l1abdbKPuf5vhA&scope=openid%20profile%20email%20offline_access%20https%3A%2F%2Fprodindego.onmicrosoft.com%2Findego-mobile-api%2FIndego.Mower.User&code_challenge={}&code_challenge_method=S256'.format(myClientID,code_challenge) + mySession.headers['host'] = 'prodindego.b2clogin.com' + del mySession.headers['content-type'] + del mySession.headers['origin'] + del mySession.headers['referer'] + response = mySession.get(url,allow_redirects=False) + self._log_communication('GET ', url, response.status_code) + + # Now Post for a token + mySession.close() + request_body = { + 'code' : myFinalCode, + 'grant_type' : 'authorization_code', + 'redirect_uri' : 'com.bosch.indegoconnect://login', + 'code_verifier' : code_verifier, + 'client_id' : myClientID + } + url = 'https://prodindego.b2clogin.com/prodindego.onmicrosoft.com/b2c_1a_signup_signin/oauth2/v2.0/token' + mySession = requests.session() + mySession.headers['accept'] = 'application/json' + mySession.headers['accept-encoding'] = 'gzip' + mySession.headers['connection'] = 'Keep-Alive' + mySession.headers['content-type'] = 'application/x-www-form-urlencoded' + mySession.headers['host'] = 'prodindego.b2clogin.com' + mySession.headers['user-agent'] = 'Dalvik/2.1.0 (Linux; U; Android 11; sdk_gphone_x86_arm Build/RSR1.201013.001)' + + response = mySession.post(url,data=request_body) + 
self._log_communication('POST ', url, response.status_code) + myJson = json.loads (response.content.decode()) + self._refresh_token = myJson['refresh_token'] + self._bearer = myJson['access_token'] + self.token_expires = myJson['expires_in'] + + + + url='https://api.indego-cloud.iot.bosch-si.com/api/v1/alms' + myHeader = {'accept-encoding' : 'gzip', + 'authorization' : 'Bearer '+ myJson['access_token'], + 'connection' : 'Keep-Alive', + 'host' : 'api.indego-cloud.iot.bosch-si.com', + 'user-agent' : 'Indego-Connect_4.0.0.12253' + } + response = requests.get(url, headers=myHeader,allow_redirects=True ) + self._log_communication('GET ', url, response.status_code) + if (response.status_code == 200): + myJson = json.loads (response.content.decode()) + self.alm_sn = myJson[0]['alm_sn'] + self.login_pending = False + self.last_login_timestamp = datetime.timestamp(datetime.now()) + self.expiration_timestamp = self.last_login_timestamp + self.token_expires + return True + else: + return False + + + def _get_predictive_calendar(self): ''' GET diff --git a/indego4shng/plugin.yaml b/indego4shng/plugin.yaml index ce72c7a48..8b519f87e 100755 --- a/indego4shng/plugin.yaml +++ b/indego4shng/plugin.yaml @@ -12,7 +12,7 @@ plugin: documentation: http://smarthomeng.de/user/plugins_doc/config/indego.html # url of documentation (wiki) page support: https://knx-user-forum.de/forum/supportforen/smarthome-py/966612-indego-connect - version: 3.0.2 # Plugin version + version: 4.0.0 # Plugin version sh_minversion: 1.6 # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) multi_instance: False # plugin supports multi instance diff --git a/indego4shng/webif/static/img/garden.svg b/indego4shng/webif/static/img/garden.svg deleted file mode 100755 index 203ee06b7..000000000 --- a/indego4shng/webif/static/img/garden.svg +++ /dev/null @@ -1,5 +0,0 @@ -XSym -0045 -b7d40008153fcf068cc1330d47e0b2b6 
-/var/www/html/smartVISU2.9/dropins/garden.svg - \ No newline at end of file diff --git a/indego4shng/webif/static/img/garden.svg b/indego4shng/webif/static/img/garden.svg new file mode 120000 index 000000000..6b11a6546 --- /dev/null +++ b/indego4shng/webif/static/img/garden.svg @@ -0,0 +1 @@ +/var/www/html/smartVISU2.9/dropins/garden.svg \ No newline at end of file diff --git a/influxdata/README.md b/influxdata/README.md index b1874c47c..001157ea7 100755 --- a/influxdata/README.md +++ b/influxdata/README.md @@ -48,8 +48,7 @@ For more information on buffers and how to setup high performance UDP listener s ```yaml influxdata: - class_name: InfluxData - class_path: plugins.influxdata + plugin_name: influxdata # influx_host = localhost # influx_port = 8089 influx_keyword: influx diff --git a/influxdb/README.md b/influxdb/README.md index 954f76ebd..91f05d339 100755 --- a/influxdb/README.md +++ b/influxdb/README.md @@ -1,4 +1,4 @@ -# Influxdb +# influxdb ## Logging to InfluxDB over UDP or HTTP @@ -43,8 +43,7 @@ you can setup global tags and fields (JSON encoded) ```yaml influxdb: - class_name: InfluxDB - class_path: plugins.influxdb + plugin_name: influxdb # host: localhost # udp_port: 8089 # keyword: influxdb diff --git a/join/README.md b/join/README.md index a2b2c9a7e..82d54611c 100755 --- a/join/README.md +++ b/join/README.md @@ -1,4 +1,4 @@ -# Join +# join Version 1.0 @@ -21,8 +21,7 @@ sudo pip3 install requests --upgrade ```yaml join: - class_name: Join - class_path: plugins.join + plugin_name: join device_id: api_key: ``` diff --git a/jvcproj/README.md b/jvcproj/README.md index 405ec0f32..6df5e38e3 100755 --- a/jvcproj/README.md +++ b/jvcproj/README.md @@ -1,4 +1,4 @@ -# JVC D-ILA Control +# jvcproj - JVC D-ILA Control With this plugin you can control JVC D-ILA projectors over TCP by using the "JVC External Control Command Communication Specification" and transfer gammatables generated with jvcprojectortools. 
@@ -17,8 +17,7 @@ Please use [this thread for support, questions, feedback etc.](https://knx-user- ```yaml jvcproj: - class_name: JVC_DILA_Control - class_path: plugins.jvcproj + plugin_name: jvcproj host: 1.1.1.1 # host address of the projector gammaconf_dir: ... # optional, location gamma table configuration files ``` diff --git a/kathrein/README.md b/kathrein/README.md index a67c71ca5..a976220f5 100755 --- a/kathrein/README.md +++ b/kathrein/README.md @@ -1,4 +1,4 @@ -# Kathrein +# kathrein ## Requirements This plugin has no requirements or dependencies. @@ -13,8 +13,7 @@ The webinterface needs to be implemented ```yaml kathrein: - class_name: Kathrein - class_path: plugins.kathrein + plugin_name: kathrein host: 192.168.0.149 # port: 9000 # kathreinid: 1 diff --git a/kostal/README.md b/kostal/README.md index 787bc9482..901e2bf4e 100755 --- a/kostal/README.md +++ b/kostal/README.md @@ -1,4 +1,4 @@ -# KOSTAL +# kostal ### Version: 1.3.1.2 @@ -55,8 +55,7 @@ The plugin can be configured like this: ```yaml Kostal_PV: - class_name: Kostal - class_path: plugins.kostal + plugin_name: kostal ip: 192.168.1.21 user: pvserver passwd: pvwr diff --git a/lirc/__init__.py b/lirc/__init__.py index abee5760c..caf1e6de2 100755 --- a/lirc/__init__.py +++ b/lirc/__init__.py @@ -2,9 +2,12 @@ # vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab ######################################################################### # Copyright 2017 Nino Coric mail2n.coric@gmail.com +# Copyright 2019- Andreas Künz onkelandy66@gmail.com ######################################################################### # This file is part of SmartHomeNG. 
# +# lirc plugin for USB remote +# # SmartHomeNG is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or @@ -24,6 +27,8 @@ import logging import time from lib.network import Tcp_client +from .webif import WebInterface +from lib.item import Items from lib.model.smartplugin import SmartPlugin from bin.smarthome import VERSION @@ -32,11 +37,10 @@ class LIRC(SmartPlugin): ALLOW_MULTIINSTANCE = True - PLUGIN_VERSION = "1.5.0" + PLUGIN_VERSION = "1.5.1" def __init__(self, smarthome): - if '.'.join(VERSION.split('.', 2)[:2]) <= '1.5': - self.logger = logging.getLogger(__name__) + super().__init__() self._host = self.get_parameter_value('host') if self._host is None: self._host = self.get_parameter_value('lirc_host') @@ -45,6 +49,7 @@ def __init__(self, smarthome): self._connect_retries = self.get_parameter_value('connect_retries') self._connect_cycle = self.get_parameter_value('connect_cycle') name = 'plugins.' 
+ self.get_fullname() + self.items = Items.get_instance() self._lirc_tcp_connection = Tcp_client(host=self._host, port=self._port, name=name, @@ -64,6 +69,8 @@ def __init__(self, smarthome): self._parseLine = 0 self._error = False self._lirc_server_alive = False + if self._init_complete: + self.init_webinterface(WebInterface) def run(self): self.alive = True @@ -87,6 +94,7 @@ def parse_item(self, item): if self.has_iattr(item.conf, REMOTE_ATTRS[0]) and \ self.has_iattr(item.conf, REMOTE_ATTRS[1]): self.logger.debug("{}: callback assigned".format(item)) + self.add_item(item, config_data_dict={'lirc': True}) return self.update_item return None @@ -162,7 +170,9 @@ def _on_received_data(self, connection, response): def update_item(self, item, caller=None, source=None, dest=None): val = item() - if val == 0: + if val == 0 and source == "Web Interface": + val = 1 + elif val == 0: return None item(0) if val < 0: @@ -172,21 +182,12 @@ def update_item(self, item, caller=None, source=None, dest=None): key = self.get_iattr_value(item.conf,REMOTE_ATTRS[1]) self.logger.debug("update_item {}, val: {}, remote: {}, key: {}".format(item, val, remote, key)) command = "SEND_ONCE {} {} {}".format(remote,key,val) - self.logger.debug("command: {}".format(command)) - self._send(command) + return self._send(command, item, True) def request_version(self): - self._lircd_version = self._send("VERSION", True) - if self._lircd_version: - self.logger.info("connected to lircd {} on {}:{}".format( \ - self._lircd_version.replace("VERSION\n","").replace("\n",""), \ - self._host,self._port)) - return True - else: - self.logger.error("lircd Version not detectable") - return False + self._send("VERSION", None, True) - def _send(self, command, reply=True): + def _send(self, command, item=None, reply=True): i = 0 while not self._lirc_server_alive: self.logger.debug("Waiting to send command {} as connection is not yet established. 
Count: {}/10".format(command, i)) @@ -204,14 +205,23 @@ def _send(self, command, reply=True): self._reply_lock.wait(1) self._reply_lock.release() self._cmd_lock.release() + try: + self._responseStr = self._responseStr.replace("\n", "") + except Exception: + pass if self._error: - self.logger.error("error from lircd: {}".format(self._responseStr.replace("\n"," "))) + self.logger.error("error from lircd: {}".format(self._responseStr)) self._error = False elif isinstance(self._responseStr, str): - self.logger.debug("response: {}".format(self._responseStr.replace("\n"," "))) + self.logger.debug("response: {}".format(self._responseStr.replace("\n",""))) + if command == "VERSION": + self._lircd_version = self._responseStr + self.logger.info("connected to lircd {} on {}:{}".format( \ + self._lircd_version.replace("VERSION\n","").replace("\n",""), \ + self._host, self._port)) return self._responseStr if __name__ == '__main__': myplugin = LIRC('smarthome-dummy') logging.basicConfig(level=logging.DEBUG, format='%(relativeCreated)6d %(threadName)s %(message)s') - myplugin._send('VERSION') + myplugin._send('VERSION', None, True) diff --git a/lirc/assets/lirc_webif.png b/lirc/assets/lirc_webif.png new file mode 100644 index 000000000..5c73e388e Binary files /dev/null and b/lirc/assets/lirc_webif.png differ diff --git a/lirc/locale.yaml b/lirc/locale.yaml new file mode 100755 index 000000000..b3aed7dc8 --- /dev/null +++ b/lirc/locale.yaml @@ -0,0 +1,18 @@ +# translations for the web interface +plugin_translations: + # Translations for the plugin specially for the web interface + 'Host': {'de': '=', 'en': '='} + 'Port': {'de': '=', 'en': '='} + 'Version': {'de': '=', 'en': '='} + 'Verbindung': {'de': '=', 'en': 'Connection'} + 'Autoreconnect': {'de': '=', 'en': '='} + 'Reconnect Details': {'de': '=', 'en': '='} + 'Retries': {'de': '=', 'en': '='} + 'Letzte Antwort': {'de': '=', 'en': 'Last Response'} + 'Senden': {'de': '=', 'en': 'Send'} + 'Item': {'de': '=', 'en': '='} + 
'Remote': {'de': '=', 'en': '='} + 'Key': {'de': '=', 'en': '='} + 'Cycle': {'de': '=', 'en': '='} + 'Letztes Update': {'de': '=', 'en': 'Last Update'} + 'Letzte Änderung': {'de': '=', 'en': 'Last Change'} diff --git a/lirc/plugin.yaml b/lirc/plugin.yaml index 044639dd7..c5c74f0ba 100755 --- a/lirc/plugin.yaml +++ b/lirc/plugin.yaml @@ -25,10 +25,10 @@ plugin: tester: onkelandy keywords: infrared, lirc, remote state: ready - documentation: https://www.smarthomeng.de/user/plugins/lirc/user_doc.html + restartable: True support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1149875-neues-plugin-lirc - version: 1.5.0 # Plugin version + version: 1.5.1 # Plugin version sh_minversion: 1.3 # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) multi_instance: True # plugin supports multi instance @@ -103,6 +103,8 @@ item_attributes: de: 'Name der Taste auf der Fernbedienung. Der Name muss dem Namen der Fernbedienung in lircd entsprechen.' en: 'The name of the key on the given remote. This name has to match the name of the key in lircd. ' +item_structs: NONE + logic_parameters: NONE # Definition of logic parameters defined by this plugin diff --git a/lirc/user_doc.rst b/lirc/user_doc.rst index 8acd92e2f..5fad4764c 100755 --- a/lirc/user_doc.rst +++ b/lirc/user_doc.rst @@ -51,4 +51,21 @@ Beispiel: lirc_key@instancename: "POWER" Wird DVDLIVINGROOM_POWER auf 5 gesetzt, wird der "POWER" Befehl 5 Mal gesendet. -Bekommt das Item den Wert 1, wird der Befehl ein Mal gesendet. +Bekommt das Item den Wert 0 oder 1, wird der Befehl ein Mal gesendet. + + +Web Interface +============= + +.. 
image:: assets/lirc_webif.png + :class: screenshot + +Das Webinterface bietet folgende Informationen: + +- **Allgemeines**: Oben rechts wird die Pluginkonfiguration angezeigt +- **Letzte Antwort**: Oben rechts wird die letzte Rückmeldung des Plugins angezeigt +- **Senden**: Es wird der hinterlegte Befehl für das Item gesendet +- **Remote**: Name der Fernbedienung +- **Key**: Name des Keys +- **Letztes Update**: Zeitpunkt, wann das Item zuletzt aktualisiert wurde +- **Letzte Änderung**: Zeitpunkt, wann das Item zuletzt geändert wurde diff --git a/lirc/webif/__init__.py b/lirc/webif/__init__.py new file mode 100755 index 000000000..17676604b --- /dev/null +++ b/lirc/webif/__init__.py @@ -0,0 +1,115 @@ +#!/usr/bin/env python3 +# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab +######################################################################### +# Copyright 2023- Andreas Künz onkelandy66@gmail.com +######################################################################### +# This file is part of SmartHomeNG. +# https://www.smarthomeNG.de +# https://knx-user-forum.de/forum/supportforen/smarthome-py +# +# lirc plugin for USB remote web interface +# +# SmartHomeNG is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# SmartHomeNG is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with SmartHomeNG. If not, see . 
+# +######################################################################### + +import datetime +import time +import os +import json + +from lib.item import Items +from lib.model.smartplugin import SmartPluginWebIf + + +# ------------------------------------------ +# Webinterface of the plugin +# ------------------------------------------ + +import cherrypy +import csv +from jinja2 import Environment, FileSystemLoader + + +class WebInterface(SmartPluginWebIf): + + def __init__(self, webif_dir, plugin): + """ + Initialization of instance of class WebInterface + + :param webif_dir: directory where the webinterface of the plugin resides + :param plugin: instance of the plugin + :type webif_dir: str + :type plugin: object + """ + self.logger = plugin.logger + self.webif_dir = webif_dir + self.plugin = plugin + self.items = Items.get_instance() + + self.tplenv = self.init_template_environment() + + + @cherrypy.expose + def index(self, reload=None): + """ + Build index.html for cherrypy + + Render the template and return the html file to be delivered to the browser + + :return: contents of the template after beeing rendered + """ + tmpl = self.tplenv.get_template('index.html') + pagelength = self.plugin.get_parameter_value('webif_pagelength') + # add values to be passed to the Jinja2 template eg: tmpl.render(p=self.plugin, interface=interface, ...) + return tmpl.render(p=self.plugin, + webif_pagelength=pagelength, + item_count=len(self.plugin.get_item_list('lirc', True)), + items=self.plugin.get_item_list('lirc', True)) + + + @cherrypy.expose + def get_data_html(self, dataSet=None): + """ + Return data to update the webpage + + For the standard update mechanism of the web interface, the dataSet to return the data for is None + + :param dataSet: Dataset for which the data should be returned (standard: None) + :return: dict with the data needed to update the web page. 
+ """ + if dataSet is None: + # get the new data + data = {'response': self.plugin._responseStr, 'items': {}} + for item in self.plugin.get_item_list('lirc', True): + data['items'].update({item.id(): {'last_update': item.property.last_update.strftime('%d.%m.%Y %H:%M:%S'), 'last_change': item.property.last_change.strftime('%d.%m.%Y %H:%M:%S')}}) + try: + return json.dumps(data) + except Exception as e: + self.logger.error(f"get_data_html exception: {e}") + return {} + + @cherrypy.expose + def submit(self, item=None): + result = None + if item is not None: + item = self.plugin.items.return_item(item) + self.logger.debug(f"Sending remote signal for {item} via web interface") + result = self.plugin.update_item(item, caller=None, source='Web Interface', dest=None) + + if result is not None: + # JSON zurücksenden + cherrypy.response.headers['Content-Type'] = 'application/json' + self.logger.debug(f"Result for web interface: {result}") + return json.dumps(result).encode('utf-8') diff --git a/lirc/webif/templates/index.html b/lirc/webif/templates/index.html new file mode 100755 index 000000000..05db5474d --- /dev/null +++ b/lirc/webif/templates/index.html @@ -0,0 +1,165 @@ +{% extends "base_plugin.html" %} +{% set update_interval = 2000 %} +{% block pluginstyles %} + +{% endblock pluginstyles %} +{% block pluginscripts %} + + + +{% endblock pluginscripts %} +{% set logo_frame = false %} + +{% set tab1title = "" ~ p.get_shortname() ~ " Items (" ~ item_count ~ ")" %} +{% set tabcount = 1 %} + +{% block headtable %} + + + + + + + + + + + + + + + + + + + + + + + + + +
{{ _('Host') }}{{ p._host }}{{ _('Port') }}{{ p._port }}
{{ _('Version') }}{{ p._lircd_version }}{{ _('Verbindung') }}{{ p._lirc_server_alive }}
{{ _('Autoreconnect') }}{{ p._autoreconnect }}{% if p._autoreconnect == true %}{{ _('Reconnect Details') }}{% endif %}{% if p._autoreconnect == true %}{{ _('Retries') }}: {{ p._connect_retries }}, + {{ _('Cycle') }}: {{ p._connect_cycle }}{% endif %}
{{ _('Letzte Antwort') }}{{ p._responseStr }}
+{% endblock headtable %} + +{% block bodytab1 %} + + + {% for item in items %} + + + + + + + + + + {% endfor %} + +
{{ item.property.path }} + {{ p.get_iattr_value(item.conf, 'lirc_remote') }}{{ p.get_iattr_value(item.conf, 'lirc_key') }}{{ item.property.last_update.strftime('%d.%m.%Y %H:%M:%S') }}{{ item.property.last_change.strftime('%d.%m.%Y %H:%M:%S') }}
+
+ +
+{% endblock bodytab1 %} diff --git a/logo/README.md b/logo/README.md index f491798af..991c1bffd 100755 --- a/logo/README.md +++ b/logo/README.md @@ -33,16 +33,14 @@ Sample configuration file for two instances of the logo plugin. ```yaml logo1: - class_name: LOGO - class_path: plugins.logo + plugin_name: logo host: 10.10.10.99 instance: logo1 # port: 102 # io_wait: 5 # version: 0BA8 logo2: - class_name: LOGO - class_path: plugins.logo + plugin_name: logo host: 10.10.10.100 version: 0BA8 instance: logo2 diff --git a/luxtronic2/README.md b/luxtronic2/README.md index 5edbabdd7..9887e76b7 100755 --- a/luxtronic2/README.md +++ b/luxtronic2/README.md @@ -1,4 +1,4 @@ -# Luxtronic2 +# luxtronic2 ## Requirements This plugin has no requirements or dependencies. @@ -9,8 +9,7 @@ This plugin has no requirements or dependencies. ```yaml luxtronic2: - class_name: Luxtronic2 - class_path: plugins.luxtronic2 + plugin_name: luxtronic2 host: 192.168.0.123 # port: 8888 ``` diff --git a/memlog/README.md b/memlog/README.md index 6e3840738..7dcf8c289 100755 --- a/memlog/README.md +++ b/memlog/README.md @@ -1,4 +1,4 @@ -# MemLog +# memlog This plugins can be used to create in-memory logs which can be used by items or other plugins. @@ -15,8 +15,7 @@ Use the plugin configuration to configure the in-memory logs. ``` memlog: - class_name: MemLog - class_path: plugins.memlog + plugin_name: memlog name: alert mappings: - time diff --git a/mieleathome/README.md b/mieleathome/README.md new file mode 100755 index 000000000..f02766aa1 --- /dev/null +++ b/mieleathome/README.md @@ -0,0 +1,116 @@ +# mieleathome + +## Version 1.0.0 + +Das Plugin ermöglicht den Zugriff auf die Miele@Home API. Es werden Stati abgefragt und +im Rahmen der Möglichkeiten der API können Geräte gesteuert werden. +Es wird das Pollen von Informationen sowie das Event-gestütze Empfangen von Daten unterstützt. +Für das Event-Listening wird ein Stream-request zum Miele-Server aufgebaut. 
Falls durch den Trennung der +Internet-Verbindung der Stream abreisst wird dies durch das Plugin erkannt und eine neuer Stream +aufgebaut. + + +## table of content + +1. [Change Log](#changelog) +2. [Aktivierung des Zugriffs für 3rd party-Apps](#activate) +3. [Einstellungen in der plugin.yaml](#plugin_yaml) +4. [Ermittln der Device-ID´s](#device_id) +5. [Items definieren](#create_items) +6. [Darstellung in der VISU](#visu) +7. [known issues](#issues) + +## ChangeLog + +### 2021-11-21 +- Version 1.0.0 +- first Commit für Tests +- Bedienen und Überwachen von Trocknern und Gefrierschränken ist implementiert +- Folgende Funktionen sind realisiert + + - Status + - programPhase + - programType + - remainingTime + - targetTemperature + - temperature + - signalInfo + - signalFailure + - signalDoor + - dryingStep + - elapsedTime + - ecoFeedback + - batteryLevel + - processAction ( start / stop / pause / start_superfreezing / stop_superfreezing / start_supercooling / stop_supercooling / PowerOn / PowerOff) + + +### Todo in Version 1.0.0 + +- Verarbeitung von "Programmen" +- Verarbeitung von "ambientLight", "light", "ventilationStep", "colors" +- Verarbeiten von "modes" + +## Aktivierung des Zugriffs für 3rd party-Apps + + +Eine App unter https://www.miele.com/f/com/en/register_api.aspx registrieren. Nach Erhalt der Freischalt-Mail die Seite aufrufen und das Client-Secret und die Client-ID kopieren und merken (speichern). +Dann einmalig über das Swagger-UI der API (https://www.miele.com/developer/swagger-ui/swagger.html) mittels Client-ID und Client-Secret über den Button "Authorize" (in grün, auf der rechten Seite) Zugriff erteilen. Wenn man Client-Id und Client-Secret eingetragen hat wird man einmalig aufgefordert mittels mail-Adresse, Passwort und Land der App-Zugriff zu erteilen. + +Die erhaltenen Daten für Client-ID und Client-Secret in der ./etc/plugin.yaml wie unten beschrieben eintragen. + +##Settings für die /etc/plugin.yaml + +

+mieleathome:
+    plugin_name: mieleathome
+    class_path: plugins.mieleathome
+    miele_cycle: 120
+    miele_client_id: ''
+    miele_client_secret: ''
+    miele_client_country: 'de-DE'
+    miele_user: ''      # email-Adress
+    miele_pwd: ''       # Miele-PWD
+
+ +## Ermitteln der benötigten Device-ID´s
+ +Das Plugin kann ohne item-Definitionen gestartet werden. Sofern gültige Zugangsdaten vorliegen +werden die registrierten Mielegeräte abgerufen. Die jeweiligen Device-Id´s können im WEB-IF auf dem +zweiten Tab eingesehen werden. + +## Anlegen der Items + +Es wird eine vorgefertigtes "Struct" für alle Geräte mitgeliefert. Es muss lediglich die Miele-"DeviceID" beim jweiligen Gerät +erfasst werden. Um die Miele-"DeviceID" zu ermitteln kann das Plugin ohne Items eingebunden und gestartet werden. Es werden im Web-IF +des Plugins alle registrierten Geräte mit der jeweiligen DeviceID angezeigt. +Führende Nullen der DeviceID sind zu übernehmen + +
+
+%YAML 1.1
+---
+MieleDevices:
+    Freezer:
+        type: str
+        miele_deviceid: 'XXXXXXXXXXX'
+        struct: mieleathome.child
+    Dryer:
+        type: str
+        miele_deviceid: 'YYYYYYYYYYY'
+        struct: mieleathome.child        
+
+
+
+ + + +## Darstellung in der VISU
+ +Es gibt eine vorgefertigte miele.html im Plugin-Ordner. Hier kann man die jeweiligen Optionen herauslesen und nach +den eigenen Anforderungen anpassen und in den eigenen Seiten verwenden. + +## known issues +### Trockner : +Ein Trockner kann nur im Modus "SmartStart" gestartet werden. +Es muss der SmartGrid-Modus aktiv sein und das Gerät auf "SmartStart" eingestellt werden. +Der Trockner kann dann via API/Plugin gestartet werden bzw. es kann eine Startzeit via API/Plugin gesetzt werden diff --git a/mieleathome/__init__.py b/mieleathome/__init__.py new file mode 100755 index 000000000..b6f65cd2d --- /dev/null +++ b/mieleathome/__init__.py @@ -0,0 +1,638 @@ +#!/usr/bin/env python3 +# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab +######################################################################### +# Copyright 2020- +######################################################################### +# This file is part of SmartHomeNG. +# https://www.smarthomeNG.de +# https://knx-user-forum.de/forum/supportforen/smarthome-py +# +# Sample plugin for new plugins to run with SmartHomeNG version 1.5 and +# upwards. +# +# SmartHomeNG is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# SmartHomeNG is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with SmartHomeNG. If not, see . 
+# +######################################################################### + +from lib.model.smartplugin import * +from lib.item import Items + +from .webif import WebInterface + +import requests +import json +import time +import threading +from datetime import datetime, timedelta + +import sys + +# If a needed package is imported, which might be not installed in the Python environment, +# add it to a requirements.txt file within the plugin's directory + + +class mieleathome(SmartPlugin): + """ + Main class of the Plugin. Does all plugin specific stuff and provides + the update functions for the items + """ + + PLUGIN_VERSION = '1.0.0' # (must match the version specified in plugin.yaml), use '1.0.0' for your initial plugin Release + + def __init__(self, sh): + """ + Initalizes the plugin. + + If you need the sh object at all, use the method self.get_sh() to get it. There should be almost no need for + a reference to the sh object any more. + + Plugins have to use the new way of getting parameter values: + use the SmartPlugin method get_parameter_value(parameter_name). Anywhere within the Plugin you can get + the configured (and checked) value for a parameter by calling self.get_parameter_value(parameter_name). It + returns the value in the datatype that is defined in the metadata. 
+ """ + self.logger = logging.getLogger(__name__) + self.sh = self.get_sh() + self.items = Items.get_instance() + self.auth = False + self.AccessToken = '' + self.RefreshToken = '' + self.Expiration = 0 + self.all_devices = {} + self.miele_devices_by_deviceID = {} + self.miele_devices_by_item = {} + self.miele_device_by_action = {} + self.miele_parsed_item = {} + self.miele_items = [] + self.miele_devices_raw = [] + + + + + # Call init code of parent class (SmartPlugin) + super().__init__() + + # get the parameters for the plugin (as defined in metadata plugin.yaml): + # self.param1 = self.get_parameter_value('param1') + + # cycle time in seconds, only needed, if hardware/interface needs to be + # polled for value changes by adding a scheduler entry in the run method of this plugin + # (maybe you want to make it a plugin parameter?) + self.client_id = self.get_parameter_value('miele_client_id') + self.client_secret = self.get_parameter_value('miele_client_secret') + self.country = self.get_parameter_value('miele_client_country') + self._cycle = self.get_parameter_value('miele_cycle') + self.user = self.get_parameter_value('miele_user') + self.pwd = self.get_parameter_value('miele_pwd') + + self.ValidFrom = '' #Time and date when (new) tokens were received + self.ValidThrough = '' #Time and date when tokens will expire + self.ValidFor = 0 #Timeframe in days for validity of tokens + self.last_ping_time = '' #Time of last Ping from Event-Listener + self.last_ping_timestamp = datetime.now() #Time of last Ping from Event-Listener + self.last_event_time ='' #Time of last Event from Event-Listener + self.last_event_action = {} #Last dict for event_action + self.last_event_device = {} #Last dict for event_device + + self.Url='https://api.mcs3.miele.com/v1' + self.event_server = None + self.auth = self._auth() + + # Initialization code goes here + + # On initialization error use: + # self._init_complete = False + # return + + # if plugin should start even without web 
interface + self.init_webinterface(WebInterface) + # if plugin should not start without web interface + # if not self.init_webinterface(): + # self._init_complete = False + + return + + def run(self): + """ + Run method for the plugin + """ + self.logger.debug("Run method called") + + self.alive = True + # setup scheduler for device poll loop (disable the following line, if you don't need to poll the device. Rember to comment the self_cycle statement in __init__ as well) + if self.auth == True: + self._getalldevices() + self._getallDevices4Action() + self._getMainItem4parseItem() + + self.event_server = miele_event(self.logger, self.Url, self.AccessToken, self) + self.event_server.name = "mieleEventListener" + self.event_server.start() + self.scheduler_add('poll_device', self.poll_device, cycle=self._cycle) + + # if you need to create child threads, do not make them daemon = True! + # They will not shutdown properly. (It's a python bug) + + myTokenRefresh = (self.Expiration-100) + self.scheduler_add('_refreshToken',self._refreshToken,cycle = myTokenRefresh) + for device in self.miele_devices_by_deviceID: + myPayload = self._getActions4Device(device) + self._parseAction4Device(myPayload, device) + + + def stop(self): + """ + Stop method for the plugin + """ + self.alive = False + self.logger.debug("Stop method called") + self.scheduler_remove('poll_device') + self.scheduler_remove('_refreshToken') + self.event_server.alive = False + self.event_server.stop() + #self.event_server.join() + + + + def _getallDevices4Action(self): + for ItemName in self.miele_devices_by_item: + for Device in self.miele_device_by_action: + if ItemName in Device: + self.miele_device_by_action[Device] = self.miele_devices_by_item[ItemName] + + def _getMainItem4parseItem(self): + for ItemName in self.miele_parsed_item: + for Device in self.miele_devices_by_item: + if Device in ItemName: + self.miele_parsed_item[ItemName] = Device + + def _auth(self): + myHeaders = { "accept" : 
"application/json" } + + payload = {"grant_type": "password", + "password" :self.pwd, + "username" : self.user, + "client_id" : self.client_id, + "client_secret":self.client_secret, + "vg" :self.country + } + + myResult = requests.post(self.Url[:-3]+'/thirdparty/token/',data=payload,headers=myHeaders) + try: + if (myResult.status_code == 200): + myRespPayload=json.loads(myResult.content.decode()) + self.AccessToken = myRespPayload['access_token'] + self.RefreshToken = myRespPayload['refresh_token'] + self.Expiration = myRespPayload['expires_in'] + self.ValidFor = int(self.Expiration / 86400) #Timeframe in days for validity of tokens + self.ValidFrom = time.ctime(time.time()) #Time and date when (new) tokens were received + self.ValidThrough = time.ctime(time.time() + self.Expiration) #Time and date when tokens will expire + return True + except: + self.logger.warning("Error while authentication on {}".format(self.Url+'/thirdparty/token/')) + + return False + + def _refreshToken(self): + myHeaders = { + "Authorization" : "Bearer {}".format(self.AccessToken), + "Content-Type" : "application/x-www-form-urlencoded", + "accept": "application/json" + } + payload = { + "client_id" : self.client_id, + "client_secret" : self.client_secret, + "refresh_token" : self.RefreshToken, + "grant_type" :"refresh_token" + } + myResult = requests.post(self.Url+'/thirdparty/token/',data=payload,headers=myHeaders) + try: + if (myResult.status_code == 200): + myRespPayload=json.loads(myResult.content.decode()) + self.AccessToken = myRespPayload['access_token'] + self.event_server.access_token = self.AccessToken + self.RefreshToken = myRespPayload['refresh_token'] + self.Expiration = myRespPayload['expires_in'] + myTokenRefresh = (self.Expiration-100) + self.ValidFor = int(self.Expiration / 86400) #Timeframe in days for validity of tokens + self.ValidFrom = time.ctime(time.time()) #Time and date when (new) tokens were received + self.ValidThrough = time.ctime(time.time() + self.Expiration) 
#Time and date when tokens will expire + self.scheduler_change('_refreshToken', cycle={myTokenRefresh:None}) # Zum Testen von 6 auf 10 Sekunden geändert + self.auth = True + except: + self.logger.warning("Error while refresh Token on {}".format(self.Url+'/thirdparty/token/')) + self.auth = False + + def _parseAction4Device(self,myPayload, deviceId): + myItemParent = self.miele_devices_by_deviceID[deviceId] + # Parse Payload to Items + self._parseDict2Item(myPayload, myItemParent+'.actions') + for entry in myPayload: + myItem = self.items.return_item(myItemParent+'.actions.'+entry) + if myItem != None: + myItem(myPayload[entry],self.get_shortname()) + myItem = self.items.return_item(myItemParent+'.actions.processAction') + if myItem != None: + myAllowedValues= myItem() + # Set allowed Action for processAction + myActions = ['start','stop','pause','start_superfreezing','stop_superfreezing','start_supercooling','stop_supercooling'] + for i in range(1, 7): + myItemName = myItemParent+'.visu.allowed_actions.'+myActions[i-1] + myItem = self.items.return_item(myItemName) + if i in myAllowedValues: + myItem(True,self.get_shortname()) + else: + myItem(False,self.get_shortname()) + + + def _getalldevices(self): + myHeaders = { + "Authorization" : "Bearer {}".format(self.AccessToken) + } + + myUrl = self.Url + "/devices?language={}".format(self.country[0:2]) + myResult = requests.get(myUrl,headers=myHeaders, timeout=5.0) + try: + if (myResult.status_code == 200): + self.all_devices = json.loads(myResult.content.decode()) + self.logger.debug("Got all devices from Miele-Cloud - start parsing to Items") + self._parseAllDevices(self.all_devices) + self.logger.debug("Got all devices from Miele-Cloud - stopped parsing to Items") + else: + pass + except Exception as err: + self.all_devices = {} + self.logger.warning("Error while getting devices from {}".format(myUrl)) + + def _parseAllDevices(self,myPayload): + ''' + ''' + ''' + !!! 
Change "type" to "device_type" in payload - shNG does not allow Items with Name "type" because its an attribute + ''' + myDummy = json.dumps(myPayload) + myDummy = myDummy.replace('"type"','"device_type"') + myPayload = json.loads(myDummy) + self.miele_devices_raw = [] + for myDevice in myPayload: + try: + self._parseDict2Item(myPayload[myDevice],self.miele_devices_by_deviceID[myDevice]) + myObj = {} + myObj['DeviceID'] = myDevice + myObj['DeviceTyp'] = myPayload[myDevice]['ident']['device_type']['value_localized'] + myObj['DeviceModel'] = myPayload[myDevice]['ident']['deviceIdentLabel']['techType'] + + self.miele_devices_raw.append(myObj) + except Exception as err: + self.logger.warning("Error while Updating Device :{}".format(myDevice)) + pass + + + def _parseDict2Item(self, my_dict,my_item_path): + for entry in my_dict: + if type(my_dict[entry]) is dict: + self._parseDict2Item(my_dict[entry],my_item_path+'.'+entry) + else: + if type(my_dict[entry]) is list : + if len(my_dict[entry]) > 0: + if entry == 'targetTemperature' and 'action' in my_item_path: + myItem = self.items.return_item(my_item_path+'.'+entry) + if (myItem != None): + myItem(my_dict[entry],self.get_shortname()) + elif entry == 'targetTemperature' and 'state' in my_item_path: + myItem = self.items.return_item(my_item_path+'.'+entry) + if (myItem != None): + myItem(my_dict[entry],self.get_shortname()) + elif entry == 'temperature' and 'state' in my_item_path: + myItem = self.items.return_item(my_item_path+'.'+entry) + if (myItem != None): + myItem(my_dict[entry],self.get_shortname()) + else: + for myArrayEntry in my_dict[entry]: + if type(myArrayEntry) is dict: + self._parseDict2Item(myArrayEntry,my_item_path+'.'+entry) + else: + myItem = self.items.return_item(my_item_path+'.'+entry) + if (myItem != None): + myItem(my_dict[entry],self.get_shortname()) + #print (my_item_path+'.'+ entry +'=' + str(my_dict[entry])) + else: + myItem = self.items.return_item(my_item_path+'.'+entry) + if (myItem != None): 
+ myItem(my_dict[entry],self.get_shortname()) + #print (my_item_path+'.'+ entry +'=' + str(my_dict[entry])) + + def _getActions4Device(self,deviceId): + + + myHeaders = { + "Authorization" : "Bearer {}".format(self.AccessToken) + } + + myUrl = self.Url + "/devices/{}/actions".format(deviceId) + myResult = requests.get(myUrl,headers=myHeaders,timeout=5.0) + try: + if (myResult.status_code == 200): + myActions = json.loads(myResult.content.decode()) + self.logger.debug("Got all actions from Miele-Cloud for {} - start parsing to Items".format(deviceId)) + return myActions + except Exception as err: + self.logger.warning("Error while getting Actions for Device :{}".format(deviceId)) + + def putCommand2Device(self,deviceID, myPayload): + try: + myHeaders = { + "Authorization" : "Bearer {}".format(self.AccessToken) + } + + myUrl = self.Url + "/devices/{}/actions".format(deviceID) + myResult = requests.put(myUrl,headers=myHeaders,json=myPayload,timeout=10.0) + except Exception as err: + self.logger.warning("Error while sending Command : {} to device {} using URL : {}- Error : {}".format(myPayload,deviceID,myUrl, err)) + pass + self.logger.debug("Result : {} sending command : {} to device {} using URL : {}".format(myResult.status_code, myPayload,deviceID,myUrl)) + + + def parse_item(self, item): + """ + Default plugin parse_item method. Is called when the plugin is initialized. + The plugin can, corresponding to its attribute keywords, decide what to do with + the item in future, like adding it to an internal array for future reference + :param item: The item to process. + :return: If the plugin needs to be informed of an items change you should return a call back function + like the function update_item down below. An example when this is needed is the knx plugin + where parse_item returns the update_item function when the attribute knx_send is found. 
+ This means that when the items value is about to be updated, the call back function is called + with the item, caller, source and dest as arguments and in case of the knx plugin the value + can be sent to the knx with a knx write function within the knx plugin. + """ + if self.has_iattr(item.conf, 'miele_deviceid'): + self.logger.debug("parse item: {}".format(item)) + self.miele_devices_by_deviceID[item.conf['miele_deviceid']] = item.path() + self.miele_devices_by_item[item.path()] = item.conf['miele_deviceid'] + if not item in self.miele_items: + self.miele_items.append(item) + return self.update_item + + if self.has_iattr(item.conf, 'miele_command'): + self.logger.debug("parse item: {}".format(item)) + self.miele_device_by_action[item.path()] = '' + if not item in self.miele_items: + self.miele_items.append(item) + return self.update_item + + if self.has_iattr(item.conf, 'miele_parse_item'): + self.logger.debug("parse item: {}".format(item)) + self.miele_parsed_item[item.path()] = '' + if not item in self.miele_items: + self.miele_items.append(item) + return self.update_item + # todo + # if interesting item for sending values: + # return self.update_item + + def parse_logic(self, logic): + """ + Default plugin parse_logic method + """ + if 'xxx' in logic.conf: + # self.function(logic['name']) + pass + + def update_item(self, item, caller=None, source=None, dest=None): + """ + Item has been updated + + This method is called, if the value of an item has been updated by SmartHomeNG. + It should write the changed value out to the device (hardware/interface) that + is managed by this plugin. 
+ + :param item: item to be updated towards the plugin + :param caller: if given it represents the callers name + :param source: if given it represents the source + :param dest: if given it represents the dest + """ + + if self.alive and caller != self.get_shortname(): + # code to execute if the plugin is not stopped + # and only, if the item has not been changed by this this plugin: + self.logger.info("Update item: {}, item has been changed inside this plugin".format(item.id())) + + if self.has_iattr(item.conf, 'foo_itemtag'): + self.logger.debug("update_item was called with item '{}' from caller '{}', source '{}' and dest '{}'".format(item, + caller, source, dest)) + if self.has_iattr(item.conf, 'miele_command') and item() == True: + deviceId = self.miele_device_by_action[item.path()] + myPayload = json.loads(item.conf['miele_command']) + self.putCommand2Device(deviceId, myPayload) + myPayload = self._getActions4Device(deviceId) + self._parseAction4Device(myPayload, deviceId) + + if self.has_iattr(item.conf, 'miele_command') and 'targetTemperature' in item.conf['miele_command']: + deviceId = self.miele_device_by_action[item.path()] + myPayload = item.conf['miele_command'].replace("%1",str(item())) + myPayload = json.loads(myPayload) + self.putCommand2Device(deviceId, myPayload) + self._getalldevices() + + + + # Function todo all the time when items are changed + if self.alive : + if self.has_iattr(item.conf, 'miele_parse_item'): + self._getMainItem4parseItem() + myMainItem = self.miele_parsed_item[item.path()] + if 'targetTemperature' in item.path() and 'actions' in item.path(): + myValues = item() + for entry in myValues: + myZone = entry['zone'] + myMin = entry['min'] + myMax = entry['max'] + myArray = [] + myTextArray = [] + for i in range(myMin, myMax+1): + myArray.append(i) + myTextArray.append(str(i)+'°') + myTargetItem = myMainItem+'.values.temperatur_zone_'+ str(myZone) +'.range_index' + myItem = self.items.return_item(myTargetItem) + if (myItem != None): 
+ myItem(myArray,self.get_shortname()) + + myTargetItem = myMainItem+'.values.temperatur_zone_'+ str(myZone) +'.range_description' + myItem = self.items.return_item(myTargetItem) + if (myItem != None): + myItem(myTextArray,self.get_shortname()) + if 'targetTemperature' in item.path() and 'state' in item.path(): + myValues = item() + myZone = 0 + for entry in myValues: + myZone += 1 + myTargetItem = myMainItem+'.visu.values.temperatur_zone_'+ str(myZone)+'.target_temperature' + myItem = self.items.return_item(myTargetItem) + if (myItem != None): + myItem(entry['value_localized'],self.get_shortname()) + myTargetItem = myMainItem+'.visu.values.temperatur_zone_'+ str(myZone)+'.unit' + myItem = self.items.return_item(myTargetItem) + if (myItem != None): + myItem(entry['unit'],self.get_shortname()) + if 'temperature' in item.path() and 'state' in item.path(): + myValues = item() + myZone = 0 + for entry in myValues: + myZone += 1 + myTargetItem = myMainItem+'.visu.values.temperatur_zone_'+ str(myZone)+'.temperature' + myItem = self.items.return_item(myTargetItem) + if (myItem != None): + myItem(entry['value_localized'],self.get_shortname()) + + + + + + def poll_device(self): + """ + Polls for updates of the device + + This method is only needed, if the device (hardware/interface) does not propagate + changes on it's own, but has to be polled to get the actual status. + It is called by the scheduler which is set within run() method. 
+ """ + if self.auth == True: + try: + self._getalldevices() + for device in self.miele_devices_by_deviceID: + myPayload = self._getActions4Device(device) + self._parseAction4Device(myPayload, device) + + + except Exception as err: + self.logger.warning("mieleathome - error during _getalldevices in poll_device - {}".format(err)) + pass + if (self.last_ping_timestamp < datetime.now() - timedelta(minutes=5)): + self.logger.debug("mieleathome - no ping since 5 minutes - retry to get new Event-Connection") + try: + self.event_server.reconnect() + except Exception as err: + self.logger.warning("mieleathome - error while trying reconnect") + pass + + + # # get the value from the device + # device_value = ... + # + # # find the item(s) to update: + # for item in self.sh.find_items('...'): + # + # # update the item by calling item(value, caller, source=None, dest=None) + # # - value and caller must be specified, source and dest are optional + # # + # # The simple case: + # item(device_value, self.get_shortname()) + # # if the plugin is a gateway plugin which may receive updates from several external sources, + # # the source should be included when updating the the value: + # item(device_value, self.get_shortname(), source=device_source_id) + + + +class miele_event(threading.Thread): + def __init__(self, logger, url, access_token, mieleathome): + threading.Thread.__init__(self) + self.logger = logger + self.url = url+ '/devices/all/events' + self.access_token = access_token + self.request = None + self.alive = False + self.mieleathome = mieleathome + self.last_event = "" + def run(self): + + self.alive = True + self.logger.debug("mieleathome - starting Event-Listener") + self.connect() + + + def reconnect(self): + self.logger.debug("mieleathome - try to establish new Event-Connection") + try: + self.response.close() + except: + self.logger.warning("mieleathome - Error while closing Event-Connection") + pass + try: + self.connect() + except: + 
self.logger.warning("mieleathome - Error while estabslishing new Event-Connection") + pass + + + def connect(self): + while self.alive == True: + try: + myHeaders = { + "Authorization" : "Bearer {}".format(self.access_token), + "Accept": "text/event-stream", + "Accept-Language" : "de-DE", + "Connection": "Keep-Alive" + } + self.response = requests.get(self.url,headers=myHeaders, stream=True,timeout=30.0) + + for line in self.response.iter_lines(): + if (not self.alive): + try: + self.response.close() + except: + pass + break + if line: + myPayload = line.decode() + + if ('event' in myPayload): + self.last_event = myPayload.split(":")[1].strip() + continue + elif 'ping' not in myPayload: + myPayload=json.loads(myPayload[6:].strip()) + + if self.last_event == "ping": + self.last_event = "" + self.mieleathome.last_ping_time = datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S') + self.mieleathome.last_ping_timestamp = datetime.now() + elif self.last_event == "devices": + self.logger.debug("mieleathome - got devices-Event :" + json.dumps(myPayload)) + self.last_event = "" + self.mieleathome.last_event_time = datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S') + self.mieleathome._parseAllDevices(myPayload) + if (myPayload != {}): + self.mieleathome.last_event_device = myPayload + elif self.last_event == "actions": + self.logger.debug("mieleathome - got actions-Event :" + json.dumps(myPayload)) + self.mieleathome.last_event_time = datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S') + self.last_event = "" + if (myPayload != {}): + self.mieleathome.last_event_action = myPayload + for device in myPayload: + self.mieleathome._parseAction4Device(myPayload[device], device) + + except Exception as err: + # Happens when Internet-Connection was disconnted + if self.alive == True: + self.logger.warning("mieleathome - connection canceled - waiting 30sec - retry to get new Event-Connection - reason : {}".format(err)) + time.sleep(30) + 
self.last_event = '' + pass + + + def stop(self): + self.logger.debug("mieleathome - stoping Event-Listener") + self.response.close() + diff --git a/mieleathome/assets/img.png b/mieleathome/assets/img.png new file mode 100644 index 000000000..6f7b59ced Binary files /dev/null and b/mieleathome/assets/img.png differ diff --git a/mieleathome/assets/img_1.png b/mieleathome/assets/img_1.png new file mode 100644 index 000000000..6f7b59ced Binary files /dev/null and b/mieleathome/assets/img_1.png differ diff --git a/mieleathome/assets/img_10.png b/mieleathome/assets/img_10.png new file mode 100644 index 000000000..b4de2ce9c Binary files /dev/null and b/mieleathome/assets/img_10.png differ diff --git a/mieleathome/assets/img_11.png b/mieleathome/assets/img_11.png new file mode 100644 index 000000000..ce6742316 Binary files /dev/null and b/mieleathome/assets/img_11.png differ diff --git a/mieleathome/assets/img_12.png b/mieleathome/assets/img_12.png new file mode 100644 index 000000000..d7e486224 Binary files /dev/null and b/mieleathome/assets/img_12.png differ diff --git a/mieleathome/assets/img_13.png b/mieleathome/assets/img_13.png new file mode 100644 index 000000000..ec1b29b89 Binary files /dev/null and b/mieleathome/assets/img_13.png differ diff --git a/mieleathome/assets/img_14.png b/mieleathome/assets/img_14.png new file mode 100644 index 000000000..96cda9d45 Binary files /dev/null and b/mieleathome/assets/img_14.png differ diff --git a/mieleathome/assets/img_15.png b/mieleathome/assets/img_15.png new file mode 100644 index 000000000..d973ec897 Binary files /dev/null and b/mieleathome/assets/img_15.png differ diff --git a/mieleathome/assets/img_16.png b/mieleathome/assets/img_16.png new file mode 100644 index 000000000..efdffa81a Binary files /dev/null and b/mieleathome/assets/img_16.png differ diff --git a/mieleathome/assets/img_17.png b/mieleathome/assets/img_17.png new file mode 100644 index 000000000..efdffa81a Binary files /dev/null and 
b/mieleathome/assets/img_17.png differ diff --git a/mieleathome/assets/img_18.png b/mieleathome/assets/img_18.png new file mode 100644 index 000000000..351d09a45 Binary files /dev/null and b/mieleathome/assets/img_18.png differ diff --git a/mieleathome/assets/img_2.png b/mieleathome/assets/img_2.png new file mode 100644 index 000000000..8f02542ad Binary files /dev/null and b/mieleathome/assets/img_2.png differ diff --git a/mieleathome/assets/img_3.png b/mieleathome/assets/img_3.png new file mode 100644 index 000000000..efd0cb00a Binary files /dev/null and b/mieleathome/assets/img_3.png differ diff --git a/mieleathome/assets/img_4.png b/mieleathome/assets/img_4.png new file mode 100644 index 000000000..8a4f04c65 Binary files /dev/null and b/mieleathome/assets/img_4.png differ diff --git a/mieleathome/assets/img_5.png b/mieleathome/assets/img_5.png new file mode 100644 index 000000000..f2aeba29f Binary files /dev/null and b/mieleathome/assets/img_5.png differ diff --git a/mieleathome/assets/img_6.png b/mieleathome/assets/img_6.png new file mode 100644 index 000000000..6e0c424c4 Binary files /dev/null and b/mieleathome/assets/img_6.png differ diff --git a/mieleathome/assets/img_7.png b/mieleathome/assets/img_7.png new file mode 100644 index 000000000..48e6b09a7 Binary files /dev/null and b/mieleathome/assets/img_7.png differ diff --git a/mieleathome/assets/img_8.png b/mieleathome/assets/img_8.png new file mode 100644 index 000000000..22e8e80f0 Binary files /dev/null and b/mieleathome/assets/img_8.png differ diff --git a/mieleathome/assets/img_9.png b/mieleathome/assets/img_9.png new file mode 100644 index 000000000..d9baf2fed Binary files /dev/null and b/mieleathome/assets/img_9.png differ diff --git a/mieleathome/locale.yaml b/mieleathome/locale.yaml new file mode 100755 index 000000000..c0984a9ee --- /dev/null +++ b/mieleathome/locale.yaml @@ -0,0 +1,10 @@ +# translations for the web interface +plugin_translations: + # Translations for the plugin specially for the 
web interface + 'Wert 2': {'de': '=', 'en': 'Value 2'} + 'Wert 4': {'de': '=', 'en': 'Value 4'} + + # Alternative format for translations of longer texts: + 'Hier kommt der Inhalt des Webinterfaces hin.': + de: '=' + en: 'Here goes the content of the web interface.' diff --git a/mieleathome/miele.html b/mieleathome/miele.html new file mode 100644 index 000000000..5f16dfbf8 --- /dev/null +++ b/mieleathome/miele.html @@ -0,0 +1,284 @@ +/** +* ----------------------------------------------------------------------------- +* @package smartVISU +* @author Andre Kohler +* @copyright 2020 +* @license GPL [http://www.gnu.de] +* ----------------------------------------------------------------------------- +*/ + +{% extends "rooms.html" %} + +{% import "lib.html" as lib %} +{% import "basic.html" as basic %} +{% import "calendar.html" as calendar %} +{% import "clock.html" as clock %} +{% import "device.html" as device %} +{% import "icon.html" as icon %} +{% import "multimedia.html" as multimedia %} +{% import "phone.html" as phone %} +{% import "plot.html" as plot %} +{% import "popup.html" as popup %} +{% import "status.html" as status %} +{% import "weather.html" as weather %} +{% import "quad.html" as quad %} + + + +{% block content %} + +
+
+
+

Miele Control Center (Dryer)

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Gerätetyp:{{ basic.print('', 'MieleDevices.Dryer.ident.device_type.value_localized', 'text') }}
Typenbezeichnung:{{ basic.print('', 'MieleDevices.Dryer.ident.deviceIdentLabel.techType','text') }}
Materialnummer:{{ basic.print('', 'MieleDevices.Dryer.ident.deviceIdentLabel.matNumber', 'text') }}
fabNummer:{{ basic.print('Serial', 'MieleDevices.Dryer.ident.deviceIdentLabel.fabNumber', '') }}
Gerätestatus:{{ basic.print('', 'MieleDevices.Dryer.state.status.value_localized','') }}
Programm:{{ basic.print('', 'MieleDevices.Dryer.state.programType.value_localized', '') }}
Trockenstufe:{{ basic.print('', 'MieleDevices.Dryer.state.dryingStep.value_localized', '') }}
Programm Phase:{{ basic.print('', 'MieleDevices.Dryer.state.programPhase.value_localized', 'text') }}
Energieverbrauch Forecast:{{ basic.print('', 'MieleDevices.Dryer.state.ecoFeedback.energyForecast', 'text') }} + {{ basic.print('', 'MieleDevices.Dryer.state.ecoFeedback.currentEnergyConsumption.unit', 'text') }} +
Gerätetür:{{basic.symbol('','MieleDevices.Dryer.state.signalDoor',['offen','geschlossen'],['fts_door_unlocked','fts_door_locked'],['1','0'],'',['red','green'],'','','','')}}
verbleibende Zeit:{{ basic.print('', 'MieleDevices.Dryer.visu.times.remainingTime', 'text') }}
verstrichene Zeit:{{ basic.print('', 'MieleDevices.Dryer.visu.times.elapsedTime', 'text') }}
Start-Zeit:{{ basic.input('', 'MieleDevices.Dryer.visu.times.startTime', 'time') }}
End-Zeit:{{ basic.input('', 'MieleDevices.Dryer.visu.times.stopTime', 'time') }}
+ + + + + +
+ {{ status.collapse('Control_1', 'MieleDevices.Dryer.visu.allowed_actions.start', 0) }} +
+ {{basic.stateswitch('','MieleDevices.Dryer.visu.action_buttons.start','midi',[0,1],'audio_play','Start',['icon0','icon1'],'','','','')}} +
+
+ {{ status.collapse('Control_2', 'MieleDevices.Dryer.visu.allowed_actions.stop', 0) }} +
+ {{basic.stateswitch('','MieleDevices.Dryer.visu.action_buttons.stop','midi',[0,1],'audio_stop','Stop',['icon0','icon1'],'','','','')}} +
+
+ {{ status.collapse('Control_3', 'MieleDevices.Dryer.visu.allowed_actions.pause', 0) }} +
+ {{basic.stateswitch('','MieleDevices.Dryer.visu.action_buttons.pause','midi',[0,1],'audio_pause','Pause',['icon0','icon1'],'','','','')}} +
+
+ {{ status.collapse('Control_4', 'MieleDevices.Dryer.visu.allowed_actions.start_supercooling', 0) }} +
+ {{basic.stateswitch('','MieleDevices.Dryer.visu.action_buttons.start_supercooling','midi',[0,1],'weather_snow','Cooling',['red','red'],'','','','')}} +
+
+ {{ status.collapse('Control_5', 'MieleDevices.Dryer.visu.allowed_actions.stop_supercooling', 0) }} +
+ {{basic.stateswitch('','MieleDevices.Dryer.visu.action_buttons.stop_supercooling','midi',[0,1],'weather_snow','Cooling',['green','green'],'','','','')}} +
+
+ {{ status.collapse('Control_6', 'MieleDevices.Dryer.visu.allowed_actions.start_superfreezing', 0) }} +
+ {{basic.stateswitch('','MieleDevices.Dryer.visu.action_buttons.start_superfreezing','midi',[0,1],'weather_frost','Freezing',['red','red'],'','','','')}} +
+
+ {{ status.collapse('Control_7', 'MieleDevices.Dryer.visu.allowed_actions.stop_superfreezing', 0) }} +
+ {{basic.stateswitch('','MieleDevices.Dryer.visu.action_buttons.stop_superfreezing','midi',[0,1],'weather_frost','Freezing',['green','green'],'','','','')}} +
+
+ {{ status.collapse('Control_8', 'MieleDevices.Dryer.visu.allowed_actions.powerOn', 0) }} +
+ {{basic.stateswitch('','MieleDevices.Dryer.visu.action_buttons.powerOn','midi',[0,1],'info_ack','Power On',['red','red'],'','','')}} +
+
+ {{ status.collapse('Control_9', 'MieleDevices.Dryer.visu.allowed_actions.powerOff', 0) }} +
+ {{basic.stateswitch('','MieleDevices.Dryer.visu.action_buttons.powerOff','midi',[0,1],'info_error','Power Off',['green','green'],'','','')}} +
+
+ {{ status.collapse('Dryer_device_failure', 'MieleDevices.Dryer.state.signalFailure') }} +
+ {{ basic.symbol('','','','info_warning','','','red','','','') }} + Fehler am Gerät +
+ {{ status.collapse('Dryer_device_info', 'MieleDevices.Dryer.state.signalInfo') }} +
+ {{ basic.symbol('','','','info_attention','','','orange','','','') }} + Info am Gerät +
+ +
+
+
+ +
+
+
+

Miele Control Center (Freezer)

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Gerätetyp:{{ basic.print('', 'MieleDevices.Freezer.ident.device_type.value_localized', 'text') }}
Typenbezeichnung:{{ basic.print('', 'MieleDevices.Freezer.ident.deviceIdentLabel.techType','text') }}
Materialnummer:{{ basic.print('', 'MieleDevices.Freezer.ident.deviceIdentLabel.matNumber', 'text') }}
fabNummer:{{ basic.print('Serial', 'MieleDevices.Freezer.ident.deviceIdentLabel.fabNumber', '') }}
Gerätestatus:{{ basic.print('', 'MieleDevices.Freezer.state.status.value_localized','') }}
Ist-Temperatur Zone 1:{{ basic.print('', 'MieleDevices.Freezer.visu.values.temperatur_zone_1.temperature', '°') }}
Soll-Temperatur Zone 1: {{ basic.select('','MieleDevices.Freezer.visu.values.temperatur_zone_1.target_temperature','','','','','','horizontal', + 'MieleDevices.Freezer.values.temperatur_zone_1.range_index','MieleDevices.Freezer.values.temperatur_zone_1.range_description') }}
Gerätetür:{{basic.symbol('','MieleDevices.Freezer.state.signalDoor',['offen','geschlossen'],['fts_door_unlocked','fts_door_locked'],['1','0'],'',['red','green'],'','','','')}}
+ + + + +
+ {{ status.collapse('Control_Freezer_1', 'MieleDevices.Freezer.visu.allowed_actions.start', 0) }} +
+ {{basic.stateswitch('','MieleDevices.Freezer.visu.action_buttons.start','midi',[0,1],'audio_play','Start',['icon0','icon1'],'','','','')}} +
+
+ {{ status.collapse('Control_Freezer_2', 'MieleDevices.Freezer.visu.allowed_actions.stop', 0) }} +
+ {{basic.stateswitch('','MieleDevices.Freezer.visu.action_buttons.stop','midi',[0,1],'audio_stop','Stop',['icon0','icon1'],'','','','')}} +
+
+ {{ status.collapse('Control_Freezer_3', 'MieleDevices.Freezer.visu.allowed_actions.pause', 0) }} +
+ {{basic.stateswitch('','MieleDevices.Freezer.visu.action_buttons.pause','midi',[0,1],'audio_pause','Pause',['icon0','icon1'],'','','','')}} +
+
+ {{ status.collapse('Control_Freezer_4', 'MieleDevices.Freezer.visu.allowed_actions.start_supercooling', 0) }} +
+ {{basic.stateswitch('','MieleDevices.Freezer.visu.action_buttons.start_supercooling','midi',[0,1],'weather_snow','Start Super Cooling',['red','red'],'','','','')}} +
+
+ {{ status.collapse('Control_Freezer_5', 'MieleDevices.Freezer.visu.allowed_actions.stop_supercooling', 0) }} +
+ {{basic.stateswitch('','MieleDevices.Freezer.visu.action_buttons.stop_supercooling','midi',[0,1],'weather_snow','Stop Super Cooling',['green','green'],'','','','')}} +
+
+ {{ status.collapse('Control_Freezer_6', 'MieleDevices.Freezer.visu.allowed_actions.start_superfreezing', 0) }} +
+ {{basic.stateswitch('','MieleDevices.Freezer.visu.action_buttons.start_superfreezing','midi',[0,1],'weather_frost','Start Super Freezing',['red','red'],'','','','')}} +
+
+ {{ status.collapse('Control_Freezer_7', 'MieleDevices.Freezer.visu.allowed_actions.stop_superfreezing', 0) }} +
+ {{basic.stateswitch('','MieleDevices.Freezer.visu.action_buttons.stop_superfreezing','midi',[0,1],'weather_frost','Stop Super Freezing',['green','green'],'','','','')}} +
+
+ {{ status.collapse('Control_Freezer_8', 'MieleDevices.Freezer.visu.allowed_actions.powerOn', 0) }} +
+ {{basic.stateswitch('','MieleDevices.Freezer.visu.action_buttons.powerOn','midi',[0,1],'info_ack','Power On',['red','red'],'','','')}} +
+
+ {{ status.collapse('Control_Freezer_9', 'MieleDevices.Freezer.visu.allowed_actions.powerOff', 0) }} +
+ {{basic.stateswitch('','MieleDevices.Freezer.visu.action_buttons.powerOff','midi',[0,1],'info_error','Power Off',['green','green'],'','','')}} +
+
+ + {{ status.collapse('Freezer_device_failure', 'MieleDevices.Freezer.state.signalFailure') }} +
+ {{ basic.symbol('','','','info_warning','','','red','','','') }} + Fehler am Gerät +
+ {{ status.collapse('Freezer_device_info', 'MieleDevices.Freezer.state.signalInfo') }} +
+ {{ basic.symbol('','','','info_attention','','','orange','','','') }} + Info am Gerät +
+ + +
+
+
+ + + + + +{% endblock %} + + + + diff --git a/mieleathome/plugin.yaml b/mieleathome/plugin.yaml new file mode 100755 index 000000000..22c9077ea --- /dev/null +++ b/mieleathome/plugin.yaml @@ -0,0 +1,465 @@ +# Metadata for the plugin +plugin: + # Global plugin attributes + type: gateway # plugin type (gateway, interface, protocol, system, web) + description: + de: 'Miele@Home-Anbindung' + en: 'Connect Miele@Home' + maintainer: sipple, AndreK01 +# tester: # Who tests this plugin? + state: develop # change to ready when done with development + keywords: iot Miele Home +# documentation: https://github.com/smarthomeNG/smarthome/wiki/CLI-Plugin # url of documentation (wiki) page + support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1512798-miele-home-mit-mqtt + + version: 1.0.0 # Plugin version (must match the version specified in __init__.py) + sh_minversion: 1.5 # minimum shNG version to use this plugin +# sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) +# py_minversion: 3.6 # minimum Python version to use for this plugin +# py_maxversion: # maximum Python version to use for this plugin (leave empty if latest) + multi_instance: False # plugin supports multi instance + restartable: true + classname: mieleathome # class containing the plugin + +parameters: + # Definition of parameters to be configured in etc/plugin.yaml (enter 'parameters: NONE', if section should be empty) + miele_cycle: + type: int + default: 300 + description: + de: 'Zeitlicher Abstand zwischen zwei Verbindungen zur API' + en: 'Time between two connects to the API' + miele_client_id: + type: str + default: '' + description: + de: 'Client ID der Miele API' + en: 'Client ID for the Miele-API' + miele_client_secret: + type: str + default: '' + description: + de: 'Client Secret der Miele API' + en: 'Client secret for the Miele-API' + miele_client_country: + type: str + default: 'de-DE' + description: + de: 'Länderkennung der Miele-API' + en: 'Counry to use 
for the Miele-API' + miele_user: + type: str + default: '' + description: + de: 'Zugangsdaten User' + en: 'Credentials User' + miele_pwd: + type: str + default: '' + description: + de: 'Zugangsdaten Passwort' + en: 'Credentials password' + +item_attributes: + # Definition of item attributes defined by this plugin (enter 'item_attributes: NONE', if section should be empty) + miele_deviceid: + type: str + mandatory: False + description: + de: 'Die DeviceId der Miele-cloud' + en: 'deviceId of the Miele-cloud' + miele_command: + type: str + mandatory: False + description: + de: 'Eine interne Funktion des Miele@Home-Plugins' + en: 'internal function of the Miele@Home-Plugin' + miele_visu_function: + type: str + mandatory: False + description: + de: 'Eine interne Funktion des Miele@Home-Plugins' + en: 'internal function of the Miele@Home-Plugin' + miele_parse_item: + type: bool + mandatory: False + description: + de: 'Eine interne Funktion des Miele@Home-Plugins' + en: 'internal function of the Miele@Home-Plugin' +item_structs: + # Definition of item-structure templates for this plugin (enter 'item_structs: NONE', if section should be empty) + child: + name: Vorlage Struktur Miele Geräte + ident: + device_type: + value_raw: + type: num + cache: 'on' + value_localized: + type: str + cache: 'on' + deviceName: + type: str + cache: 'on' + deviceIdentLabel: + fabNumber: + type: str + cache: 'on' + fabIndex: + type: str + cache: 'on' + techType: + type: str + cache: 'on' + matNumber: + type: str + cache: 'on' + xkmIdentLabel: + techType: + type: str + cache: 'on' + releaseVersion: + type: str + cache: 'on' + state: + ProgramID: + value_localized: + type: str + cache: 'on' + status: + value_localized: + type: str + cache: 'on' + enforce_updates: yes + value_raw: + type: num + cache: 'on' + enforce_updates: yes + programType: + value_localized: + type: str + cache: 'on' + programPhase: + value_localized: + type: str + cache: 'on' + remainingTime: + type: list + cache: 'on' + 
enforce_updates: yes + startTime: + type: list + cache: 'on' + enforce_updates: yes + targetTemperature: + type: list + cache: 'on' + miele_parse_item: true + enforce_updates: yes + temperature: + type: list + cache: 'on' + miele_parse_item: true + signalInfo: + type: bool + cache: 'on' + enforce_updates: yes + signalFailure: + type: bool + cache: 'on' + enforce_updates: yes + signalDoor: + type: bool + cache: 'on' + enforce_updates: yes + dryingStep: + value_localized: + type: str + cache: 'on' + enforce_updates: yes + value_raw: + type: num + cache: 'on' + enforce_updates: yes + elapsedTime: + type: list + cache: 'on' + enforce_updates: yes + ecoFeedback: + currentWaterConsumption: + unit: + type: str + cache: 'on' + value: + type: num + cache: 'on' + currentEnergyConsumption: + unit: + type: str + cache: 'on' + value: + type: num + cache: 'on' + waterForecast: + type: num + cache: 'on' + energyForecast: + type: num + cache: 'on' + batteryLevel: + type: num + cache: 'on' + + + actions: + processAction: + type: list + cache: 'on' + light: + type: list + cache: 'on' + ventilationStep: + type: list + cache: 'on' + programId: + type: list + cache: 'on' + targetTemperature: + type: list + cache: 'on' + miele_parse_item: true + enforce_updates: yes + startTime: + type: list + cache: 'on' + deviceName: + type: bool + cache: 'on' + powerOn: + type: bool + cache: 'on' + powerOff: + type: bool + cache: 'on' + modes: + type: list + cache: 'on' + + + visu: + times: + scheduled_startTime: + visu_acl: rw + type: str + eval_trigger: ....state.startTime + eval: str("%0.2d"%sh.....state.startTime()[0])+":"+str("%0.2d"%sh.....state.startTime()[1]) + miele_parse_item: true + startTime: + visu_acl: rw + type: str + initial_value: "00:00:00" + miele_parse_item: true + stopTime: + visu_acl: rw + type: str + initial_value: "00:00:00" + miele_parse_item: true + elapsedTime: + type: str + eval_trigger: ....state.elapsedTime + eval: 
str("%0.2d"%sh.....state.elapsedTime()[0])+":"+str("%0.2d"%sh.....state.elapsedTime()[1]) if len(sh.....state.elapsedTime()) >0 else '00:00' + remainingTime: + type: str + eval_trigger: ....state.remainingTime + eval: str("%0.2d"%sh.....state.remainingTime()[0])+":"+str("%0.2d"%sh.....state.remainingTime()[1]) if len(sh.....state.remainingTime()) >0 else '00:00' + values: + temperatur_zone_1: + target_temperature: + type: num + visu_acl: rw + cache: 'on' + miele_command: '{"targetTemperature": [ { "zone": 1, "value": %1 } ]}' + temperature: + type: num + visu_acl: rw + cache: 'on' + unit: + type: str + visu_acl: rw + cache: 'on' + temperatur_zone_2: + target_temperature: + type: num + visu_acl: rw + cache: 'on' + miele_command: '{"targetTemperature": [ { "zone": 2, "value": %1 } ]}' + temperature: + type: num + visu_acl: rw + cache: 'on' + unit: + type: str + visu_acl: rw + cache: 'on' + temperatur_zone_3: + target_temperature: + type: num + visu_acl: rw + cache: 'on' + miele_command: '{"targetTemperature": [ { "zone": 3, "value": %1 } ]}' + temperature: + type: num + visu_acl: rw + cache: 'on' + unit: + type: str + visu_acl: rw + cache: 'on' + + allowed_actions: + start: + type: bool + visu_acl: rw + + stop: + type: bool + visu_acl: rw + + pause: + type: bool + visu_acl: rw + + start_superfreezing: + type: bool + visu_acl: rw + + stop_superfreezing: + type: bool + visu_acl: rw + + start_supercooling: + type: bool + visu_acl: rw + + stop_supercooling: + type: bool + visu_acl: rw + + deviceName: + type: bool + visu_acl: rw + + powerOn: + type: bool + visu_acl: rw + eval_trigger: ....actions.powerOn + eval: sh.....actions.powerOn() + powerOff: + type: bool + visu_acl: rw + eval_trigger: ....actions.powerOff + eval: sh.....actions.powerOff() + temp_Zone1: + type: bool + visu_acl: rw + eval_trigger: ....visu.values.temperatur_zone_1.target_temperature + eval: 1 if sh.....visu.values.temperatur_zone_1.target_temperature() != 0 else 0 + temp_Zone2: + type: bool + 
visu_acl: rw + eval_trigger: ....visu.values.temperatur_zone_2.target_temperature + eval: 1 if sh.....visu.values.temperatur_zone_2.target_temperature() != 0 else 0 + temp_Zone3: + type: bool + visu_acl: rw + eval_trigger: ....visu.values.temperatur_zone_3.target_temperature + eval: 1 if sh.....visu.values.temperatur_zone_3.target_temperature() != 0 else 0 + + + + action_buttons: + start: + type: bool + visu_acl: rw + autotimer: 1 = False + miele_command: '{"processAction": 1}' + enforce_updates: yes + stop: + type: bool + visu_acl: rw + autotimer: 1 = False + miele_command: '{"processAction": 2}' + enforce_updates: yes + pause: + type: bool + visu_acl: rw + autotimer: 1 = False + miele_command: '{"processAction": 3}' + enforce_updates: yes + start_superfreezing: + type: bool + visu_acl: rw + autotimer: 1 = False + miele_command: '{"processAction": 4}' + enforce_updates: yes + stop_superfreezing: + type: bool + visu_acl: rw + autotimer: 1 = False + miele_command: '{"processAction": 5}' + enforce_updates: yes + start_supercooling: + type: bool + visu_acl: rw + autotimer: 1 = False + miele_command: '{"processAction": 6}' + enforce_updates: yes + stop_supercooling: + type: bool + visu_acl: rw + autotimer: 1 = False + miele_command: '{"processAction": 7}' + enforce_updates: yes + powerOn: + type: bool + visu_acl: rw + miele_command: '{"powerOn":true}' + enforce_updates: yes + powerOff: + type: bool + visu_acl: rw + miele_command: '{"powerOff":true}' + enforce_updates: yes + + values: + temperatur_zone_1: + range_index: + type: list + visu_acl: rw + range_description: + type: list + visu_acl: rw + temperatur_zone_2: + range_index: + type: list + visu_acl: rw + range_description: + type: list + visu_acl: rw + temperatur_zone_3: + range_index: + type: list + visu_acl: rw + range_description: + type: list + visu_acl: rw + +#item_attribute_prefixes: + # Definition of item attributes that only have a common prefix (enter 'item_attribute_prefixes: NONE' or ommit this section, 
if section should be empty) + # NOTE: This section should only be used, if really nessesary (e.g. for the stateengine plugin) + +plugin_functions: + # Definition of plugin functions defined by this plugin (enter 'plugin_functions: NONE', if section should be empty) + +logic_parameters: + # Definition of logic parameters defined by this plugin (enter 'logic_parameters: NONE', if section should be empty) diff --git a/mieleathome/user_doc.rst b/mieleathome/user_doc.rst new file mode 100755 index 000000000..fcb29e588 --- /dev/null +++ b/mieleathome/user_doc.rst @@ -0,0 +1,6 @@ +Sample Plugin <- hier den Namen des Plugins einsetzen +===================================================== + +Anforderungen +------------- +Wird nachgereicht diff --git a/avm/_pv_1_5_12/webif/__init__.py b/mieleathome/webif/__init__.py similarity index 61% rename from avm/_pv_1_5_12/webif/__init__.py rename to mieleathome/webif/__init__.py index c06effb25..acedea7ee 100755 --- a/avm/_pv_1_5_12/webif/__init__.py +++ b/mieleathome/webif/__init__.py @@ -28,6 +28,7 @@ import datetime import time import os +import json from lib.item import Items from lib.model.smartplugin import SmartPluginWebIf @@ -41,6 +42,7 @@ import csv from jinja2 import Environment, FileSystemLoader + class WebInterface(SmartPluginWebIf): def __init__(self, webif_dir, plugin): @@ -52,13 +54,16 @@ def __init__(self, webif_dir, plugin): :type webif_dir: str :type plugin: object """ + self.logger = plugin.logger self.webif_dir = webif_dir self.plugin = plugin - self.logger = plugin.logger + self.items = Items.get_instance() + self.tplenv = self.init_template_environment() + @cherrypy.expose - def index(self, reload=None, action=None): + def index(self, reload=None): """ Build index.html for cherrypy @@ -66,23 +71,39 @@ def index(self, reload=None, action=None): :return: contents of the template after beeing rendered """ - tabcount = 3 - call_monitor_items = 0 - if self.plugin._call_monitor: - call_monitor_items = 
self.plugin._monitoring_service.get_item_count_total() - tabcount = 4 - tmpl = self.tplenv.get_template('index.html') - return tmpl.render(plugin_shortname=self.plugin.get_shortname(), plugin_version=self.plugin.get_version(), - plugin_info=self.plugin.get_info(), tabcount=tabcount, - avm_items=self.plugin.get_fritz_device().get_item_count(), - call_monitor_items=call_monitor_items, - p=self.plugin) + # add values to be passed to the Jinja2 template eg: tmpl.render(p=self.plugin, interface=interface, ...) + return tmpl.render(p=self.plugin, + items=sorted(self.items.return_items(), key=lambda k: str.lower(k['_path'])), + item_count=len (self.plugin.miele_items)) @cherrypy.expose - def reboot(self): - self.plugin.reboot() + def get_data_html(self, dataSet=None): + """ + Return data to update the webpage + + For the standard update mechanism of the web interface, the dataSet to return the data for is None + + :param dataSet: Dataset for which the data should be returned (standard: None) + :return: dict with the data needed to update the web page. 
+ """ + if dataSet is None: + # get the new data + data = {} + data['Device']=self.plugin.last_event_device + data['Action']=self.plugin.last_event_action + data['last_Event']=self.plugin.last_event_time + data['last_Ping']=self.plugin.last_ping_time + + + # data['item'] = {} + # for i in self.plugin.items: + # data['item'][i]['value'] = self.plugin.getitemvalue(i) + # + # return it as json the the web page + try: + return json.dumps(data) + except Exception as e: + self.logger.error("get_data_html exception: {}".format(e)) + - @cherrypy.expose - def reconnect(self): - self.plugin.reconnect() \ No newline at end of file diff --git a/mieleathome/webif/static/img/plugin_logo.svg b/mieleathome/webif/static/img/plugin_logo.svg new file mode 100644 index 000000000..d823c6150 --- /dev/null +++ b/mieleathome/webif/static/img/plugin_logo.svg @@ -0,0 +1,11 @@ + + + + + + + + + + + diff --git a/mieleathome/webif/static/img/readme.txt b/mieleathome/webif/static/img/readme.txt new file mode 100755 index 000000000..1a7c55eef --- /dev/null +++ b/mieleathome/webif/static/img/readme.txt @@ -0,0 +1,6 @@ +This directory is for storing images that are used by the web interface. + +If you want to have your own logo on the top of the web interface, store it here and name it plugin_logo.. + +Extension can be png, svg or jpg + diff --git a/mieleathome/webif/templates/index.html b/mieleathome/webif/templates/index.html new file mode 100755 index 000000000..8e5a42f4d --- /dev/null +++ b/mieleathome/webif/templates/index.html @@ -0,0 +1,286 @@ +{% extends "base_plugin.html" %} + +{% set logo_frame = false %} + + +{% set update_interval = 5000 %} + + +{% block pluginscripts %} + +{% endblock pluginscripts %} + + +{% block headtable %} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
client_id{{ p.client_id }}client_secret{{ p.client_secret }}
access_token{{ p.AccessToken }}refresh_token{{ p.RefreshToken }}
valid from{{ p.ValidFrom }}valid through{{ p.ValidThrough }}{{ p.ValidFor }} {{ _('d') }}
Country{{ p.country }}Cycle{{ p._cycle }} {{ _('s') }}
+{% endblock headtable %} + + + +{% block buttons %} +{% if 1==2 %} +
+ +
+{% endif %} +{% endblock %} + + +{% set tabcount = 3 %} + + + +{% if item_count==0 %} + {% set start_tab = 1 %} +{% endif %} + + + +{% set tab1title = "" ~ p.get_shortname() ~ " Items (" ~ item_count ~ ")" %} +{% block bodytab1 %} +
+ +
+
+ +
+ + + + + + + + + + {% for item in p.miele_items %} + + + + + + {% endfor %} + +
{{ _('Item') }}{{ _('Type') }}{{ _('Value') }}
{{ item._path }}{{ item._type }}{{ item._value }}
+
+
+
+ +
+{% endblock bodytab1 %} + + + +{% set tab2title = "" ~ p.get_shortname() ~ " Geräte " %} +{% block bodytab2 %} +
+ +
+
+ +
+ + + + + + + + + + + {% for item in p.miele_devices_raw %} + + + + + + + {% endfor %} + +
{{ _('Device ID') }}{{ _('linked Item') }}{{ _('Device Type') }}{{ _('Model') }}
{{ item.DeviceID }}{{ p.miele_devices_by_deviceID[item.DeviceID]}}{{ item.DeviceTyp }}{{ item.DeviceModel }}
+
+
+
+ +
+{% endblock bodytab2 %} + + + +{% set tab3title = "Event-Informations " %} +{% block bodytab3 %} +
+
+
+ + + + + + + + + + + + + + + +
+ Last Event Update :

{{ p.last_event_time }}

+
+ Last Ping :

{{ p.last_ping_time }}

+
Device-Event Action-Event
+ + + + +
+
+
+
+ +" %} + + It has to be defined before (and outside) the block bodytab4 +--> +{% block bodytab4 %} +{% endblock bodytab4 %} diff --git a/miflora/README.md b/miflora/README.md index 2501d9dd6..39c701251 100755 --- a/miflora/README.md +++ b/miflora/README.md @@ -1,4 +1,4 @@ -# Miflora +# miflora ## Requirements This plugin requires lib miflora in version 0.4 or above. You can install this lib with: @@ -26,8 +26,7 @@ Forum thread to the plugin: https://knx-user-forum.de/forum/supportforen/smartho ```yaml miflora: - class_name: Miflora - class_path: miflora + plugin_name: miflora bt_library: bluepy bt_addr: C4:7C:7E:21:F3:2B cycle: 300 diff --git a/milight/README.md b/milight/README.md index 505a69267..eec974109 100755 --- a/milight/README.md +++ b/milight/README.md @@ -1,4 +1,4 @@ -# Milight +# milight #### Version 1.6.0 @@ -41,8 +41,7 @@ Typical configuration ```yaml milight: - class_name: milight - class_path = plugins.milight + plugin_name: milight #udp_ip: 192.168.123.147 #udp_port: 8899 #bri: yes diff --git a/mlgw/README.md b/mlgw/README.md index e7eec9ba0..4fbceae4a 100755 --- a/mlgw/README.md +++ b/mlgw/README.md @@ -1,4 +1,4 @@ -# Bang & Olufsen Masterlink Gateway +# mlgw Plugin - Bang & Olufsen Masterlink Gateway ## Changelog @@ -43,8 +43,7 @@ This plugin need a Bang & Olufsen Masterlink Gateway and can connect to it via T ```yaml mlgw: - class_name: Mlgw - class_path: plugins.mlgw + plugin_name: mlgw host: mlgw.local # port: 9000 # username: mlgw diff --git a/modbus_tcp/__init__.py b/modbus_tcp/__init__.py index 63352166e..53858f52a 100755 --- a/modbus_tcp/__init__.py +++ b/modbus_tcp/__init__.py @@ -4,7 +4,7 @@ # Copyright 2022 De Filippis Ivan # Copyright 2022 Ronny Schulz ######################################################################### -# This file is part of SmartHomeNG. +# This file is part of SmartHomeNG. # # Sample plugin for new plugins to run with SmartHomeNG version 1.4 and # upwards. 
@@ -57,37 +57,37 @@ class modbus_tcp(SmartPlugin): ALLOW_MULTIINSTANCE = True - PLUGIN_VERSION = '1.0.7' + PLUGIN_VERSION = '1.0.8' def __init__(self, sh, *args, **kwargs): """ Initializes the plugin. The parameters describe for this method are pulled from the entry in plugin.conf. :param sh: The instance of the smarthome object, save it for later references """ - + self.logger.info('Init modbus_tcp plugin') - + # Call init code of parent class (SmartPlugin) super().__init__() - + self._host = self.get_parameter_value('host') self._port = int(self.get_parameter_value('port')) self._cycle = int(self.get_parameter_value('cycle')) self._slaveUnit = int(self.get_parameter_value('slaveUnit')) self._slaveUnitRegisterDependend = False - + self._sh = sh self._regToRead = {} self._regToWrite = {} self._pollStatus = {} self.connected = False - + self._Mclient = ModbusTcpClient(self._host, port=self._port) self.lock = threading.Lock() - + self.init_webinterface(WebInterface) - + return def run(self): @@ -106,7 +106,7 @@ def stop(self): self.scheduler_remove('modbusTCP_poll_device') self._Mclient.close() self.connected = False - + def parse_item(self, item): """ Default plugin parse_item method. Is called when the plugin is initialized. 
@@ -118,7 +118,7 @@ def parse_item(self, item): if self.has_iattr(item.conf, AttrAddress): self.logger.debug("parse item: {0}".format(item)) regAddr = int(self.get_iattr_value(item.conf, AttrAddress)) - + objectType = 'HoldingRegister' value = item() dataType = 'uint16' @@ -127,7 +127,7 @@ def parse_item(self, item): wordOrder = 'Endian.Big' slaveUnit = self._slaveUnit dataDirection = 'read' - + if self.has_iattr(item.conf, AttrType): dataType = self.get_iattr_value(item.conf, AttrType) if self.has_iattr(item.conf, AttrSlaveUnit): @@ -136,13 +136,13 @@ def parse_item(self, item): self._slaveUnitRegisterDependend = True if self.has_iattr(item.conf, AttrObjectType): objectType = self.get_iattr_value(item.conf, AttrObjectType) - + reg = str(objectType) # dictionary key: objectType.regAddr.slaveUnit // HoldingRegister.528.1 reg += '.' reg += str(regAddr) reg += '.' reg += str(slaveUnit) - + if self.has_iattr(item.conf, AttrDirection): dataDirection = self.get_iattr_value(item.conf, AttrDirection) if self.has_iattr(item.conf, AttrFactor): @@ -164,9 +164,9 @@ def parse_item(self, item): wordOrder = Endian.Little else: wordOrder = Endian.Big - self.logger.warning("Invalid byte order -> default(Endian.Big) is used") - - regPara = {'regAddr': regAddr, 'slaveUnit': slaveUnit, 'dataType': dataType, 'factor': factor, 'byteOrder': byteOrder, + self.logger.warning("Invalid byte order -> default(Endian.Big) is used") + + regPara = {'regAddr': regAddr, 'slaveUnit': slaveUnit, 'dataType': dataType, 'factor': factor, 'byteOrder': byteOrder, 'wordOrder': wordOrder, 'item': item, 'value': value, 'objectType': objectType, 'dataDir': dataDirection } if dataDirection == 'read': self._regToRead.update({reg: regPara}) @@ -177,9 +177,9 @@ def parse_item(self, item): self.logger.info("parse item: {0} Attributes {1}".format(item, regPara)) return self.update_item else: - self.logger.warning("Invalid data direction -> default(read) is used") + self.logger.warning("Invalid data direction -> 
default(read) is used") self._regToRead.update({reg: regPara}) - + def poll_device(self): """ Polls for updates of the device @@ -237,7 +237,7 @@ def poll_device(self): self.logger.debug("poll_device: {0} register readed requed-time: {1}".format(regCount, duration)) except Exception as e: self.logger.error("something went wrong in the poll_device function: {0}".format(e)) - + # called each time an item changes. def update_item(self, item, caller=None, source=None, dest=None): """ @@ -255,8 +255,8 @@ def update_item(self, item, caller=None, source=None, dest=None): objectType = 'HoldingRegister' slaveUnit = self._slaveUnit dataDirection = 'read' - - + + if caller == self.get_fullname(): #self.logger.debug('item was changed by the plugin itself - caller:{0} source:{1} dest:{2} '.format(caller, source, dest)) return @@ -281,7 +281,7 @@ def update_item(self, item, caller=None, source=None, dest=None): objectType = self.get_iattr_value(item.conf, AttrObjectType) else: return - + reg = str(objectType) # Dict-key: HoldingRegister.528.1 *** objectType.regAddr.slaveUnit *** reg += '.' reg += str(regAddr) @@ -311,34 +311,34 @@ def update_item(self, item, caller=None, source=None, dest=None): self.__write_Registers(regPara, item()) except Exception as e: self.logger.error("something went wrong in the __write_Registers function: {0}".format(e)) - + def __write_Registers(self, regPara, value): objectType = regPara['objectType'] address = regPara['regAddr'] slaveUnit = regPara['slaveUnit'] - bo = regPara['byteOrder'] + bo = regPara['byteOrder'] wo = regPara['wordOrder'] dataTypeStr = regPara['dataType'] dataType = ''.join(filter(str.isalpha, dataTypeStr)) # vom dataType die Ziffen entfernen z.B. uint16 = uint registerCount = 0 # Anzahl der zu schreibenden Register (Words) - + try: bits = int(''.join(filter(str.isdigit, dataTypeStr))) # bit-Zahl aus aus dataType z.B. 
uint16 = 16 except: bits = 16 - - if dataType.lower() == 'string': + + if dataType.lower() == 'string': registerCount = int(bits/2) # bei string: bits = bytes !! string16 -> 16Byte - 8 registerCount else: registerCount = int(bits/16) - + if regPara['factor'] != 1: #self.logger.debug("value {0} divided by: {1}".format(value, regPara['factor'])) value = value * (1/regPara['factor']) - + self.logger.debug("write {0} to {1}.{2}.{3} (address.slaveUnit) dataType:{4}".format(value, objectType, address, slaveUnit, dataTypeStr)) builder = BinaryPayloadBuilder(byteorder=bo, wordorder=wo) - + if dataType.lower() == 'uint': if bits == 16: builder.add_16bit_uint(int(value)) @@ -370,7 +370,7 @@ def __write_Registers(self, regPara, value): if objectType == 'Coil' or objectType == 'DiscreteInput': if not type(value) == type(True): # test is boolean self.logger.error("Value is not boolean: {0}".format(value)) - return + return else: if set(value).issubset({'0', '1'}) and bool(value): # test is bit-string '00110101' builder.add_bits(value) @@ -379,7 +379,7 @@ def __write_Registers(self, regPara, value): else: self.logger.error("Number of bits or datatype not supported : {0}".format(dataTypeStr)) return None - + if objectType == 'Coil': result = self._Mclient.write_coil(address, value, unit=slaveUnit) elif objectType == 'HoldingRegister': @@ -396,48 +396,48 @@ def __write_Registers(self, regPara, value): if result.isError(): self.logger.error("write error: {0} {1}.{2}.{3} (address.slaveUnit)".format(result, objectType, address, slaveUnit)) return None - + if 'write_dt' in regPara: regPara['last_write_dt'] = regPara['write_dt'] regPara['write_dt'] = datetime.now() else: regPara.update({'write_dt': datetime.now()}) - + if 'write_value' in regPara: regPara['last_write_value'] = regPara['write_value'] regPara['write_value'] = value else: regPara.update({'write_value': value}) - + #regPara['write_dt'] = datetime.now() #regPara['write_value'] = value - - + + def __read_Registers(self, 
regPara): objectType = regPara['objectType'] dataTypeStr = regPara['dataType'] dataType = ''.join(filter(str.isalpha, dataTypeStr)) - bo = regPara['byteOrder'] + bo = regPara['byteOrder'] wo = regPara['wordOrder'] slaveUnit = regPara['slaveUnit'] registerCount = 0 address = regPara['regAddr'] value = None - + try: - bits = int(''.join(filter(str.isdigit, dataTypeStr))) + bits = int(''.join(filter(str.isdigit, dataTypeStr))) except: bits = 16 - - if dataType.lower() == 'string': + + if dataType.lower() == 'string': registerCount = int(bits/2) # bei string: bits = bytes !! string16 -> 16Byte - 8 registerCount else: registerCount = int(bits/16) - + if self.connected == False: self.logger.error(" not connect {0}:{1}".format(self._host, self._port)) return None - + #self.logger.debug("read {0}.{1}.{2} (address.slaveUnit) regCount:{3}".format(objectType, address, slaveUnit, registerCount)) if objectType == 'Coil': if pymodbus_baseversion > 2: @@ -462,11 +462,11 @@ def __read_Registers(self, regPara): else: self.logger.error("{0} not supported: {1}".format(AttrObjectType, objectType)) return None - + if result.isError(): self.logger.error("read error: {0} {1}.{2}.{3} (address.slaveUnit) regCount:{4}".format(result, objectType, address, slaveUnit, registerCount)) return None - + if objectType == 'Coil': value = result.bits[0] elif objectType == 'DiscreteInput': @@ -475,9 +475,9 @@ def __read_Registers(self, regPara): decoder = BinaryPayloadDecoder.fromRegisters(result.registers, byteorder=bo,wordorder=wo) else: decoder = BinaryPayloadDecoder.fromRegisters(result.registers, byteorder=bo,wordorder=wo) - + self.logger.debug("read {0}.{1}.{2} (address.slaveUnit) regCount:{3} result:{4}".format(objectType, address, slaveUnit, registerCount, result)) - + if dataType.lower() == 'uint': if bits == 16: return decoder.decode_16bit_uint() diff --git a/mvg_live/README.md b/mvg_live/README.md index 1bda22c8c..b8b9b21bb 100755 --- a/mvg_live/README.md +++ b/mvg_live/README.md @@ -1,4 
+1,4 @@ -# MVG Live +# mvg_live - MVG Live ## Requirements This plugin requires lib PyMVGLive. You can install this lib with: @@ -19,8 +19,7 @@ Forum thread to the plugin: https://knx-user-forum.de/forum/supportforen/smartho ```yaml mvg_live: - class_name: MVG_Live - class_path: plugins.mvg_live + plugin_name: mvg_live ``` ### items.yaml diff --git a/neato/robot.py b/neato/robot.py index 5297ac43c..ed68aa0df 100755 --- a/neato/robot.py +++ b/neato/robot.py @@ -158,13 +158,13 @@ def robot_command(self, command, arg1 = None, arg2 = None): self.logger.warning(f"Command returned {str(responseJson['result'])}: Retry starting with non-persistent-map") return self.robot_command(command = 'start_non-persistent-map') else: - self.logger.error("Sending command failed. Result: {0}".format(str(responseJson['result']) )) + self.logger.error("Sending command {command} failed. Result: {0}".format(str(responseJson['result']) )) self.logger.error("Debug: send command response: {0}".format(start_cleaning_response.text)) else: if 'message' in responseJson: - self.logger.error("Sending command failed. Message: {0}".format(str(responseJson['message']))) + self.logger.error("Sending command {command} failed. Message: {0}".format(str(responseJson['message']))) if 'error' in responseJson: - self.logger.error("Sending command failed. Error: {0}".format(str(responseJson['error']))) + self.logger.error("Sending command {command} failed. Error: {0}".format(str(responseJson['error']))) # - NOT on Charge BASE return start_cleaning_response diff --git a/network/user_doc.rst b/network/user_doc.rst index 00f6452ae..d9baef67a 100755 --- a/network/user_doc.rst +++ b/network/user_doc.rst @@ -28,8 +28,7 @@ plugin.yaml .. 
code:: yaml nw: - class_name: Network - class_path: plugins.network + plugin_name: network # ip: 0.0.0.0 # port: 2727 tcp: yes diff --git a/onewire/__init__.py b/onewire/__init__.py index cefbfa4dd..3b378bc60 100755 --- a/onewire/__init__.py +++ b/onewire/__init__.py @@ -43,7 +43,7 @@ class OneWire(SmartPlugin): the update functions for the items """ - PLUGIN_VERSION = '1.9.2' + PLUGIN_VERSION = '1.9.4' _flip = {0: '1', False: '1', 1: '0', True: '0', '0': True, '1': False} @@ -124,6 +124,7 @@ def __init__(self, sh, *args, **kwargs ): self.read_alias_definitions() self._io_wait = self.get_parameter_value('io_wait') + self._parasitic_power_wait = self.get_parameter_value('parasitic_power_wait') self._button_wait = self.get_parameter_value('button_wait') self._cycle = self.get_parameter_value('cycle') self.log_counter_cycle_time = self.get_parameter_value('log_counter_cycle_time') @@ -292,7 +293,7 @@ def _io_cycle(self): items = self.get_items_for_mapping(addr + '-' + key) if path is None: if debugLog: - self.logger.debug(f"_io_cycle: path not found for {item.id()}") + self.logger.debug(f"_io_cycle: no item path found for mapping '{addr}-{key}'") continue try: # the following can take a while so if in the meantime the plugin should stop we can abort this process here @@ -303,16 +304,19 @@ def _io_cycle(self): value = (addr in entries) else: value = self._flip[self.owbase.read('/uncached' + path).decode()] + self.stopevent.wait(self._parasitic_power_wait) except ConnectionError as e: self.logger.warning(f"_io_cycle: 'raise' {self._ios[addr][key]['readerrors']}. problem connecting to {addr}-{key}, error: {e}") raise except Exception as e: + # time.sleep(self._parasitic_power_wait) + #self.stopevent.wait(self._parasitic_power_wait) self._ios[addr][key]['readerrors'] = self._ios[addr][key].get('readerrors', 0) + 1 if self._ios[addr][key]['readerrors'] % self.warn_after == 0: self.logger.warning(f"_io_cycle: {self._ios[addr][key]['readerrors']}. 
problem reading {addr}-{key}, error: {e}") continue if self._ios[addr][key].get('readerrors', 0) >= self.warn_after: - self.logger.notice(f"_io_cycle: Success reading {addr}-{key} {value=}, up to now there were {self._ios[addr][key]['readerrors']} consecutive problems") + self.logger.notice(f"_io_cycle: Success reading '{addr}-{key}' {value=}, up to now there were {self._ios[addr][key]['readerrors']} consecutive problems") self._ios[addr][key]['readerrors'] = 0 for item in items: item(value, self.get_shortname(), path) @@ -350,8 +354,8 @@ def _ibutton_cycle(self): try: entries = self.owbase.dir(path) except Exception: - #time.sleep(0.5) - self.stopevent.wait(0.5) + #time.sleep(self._parasitic_power_wait) + self.stopevent.wait(self._parasitic_power_wait) error = True continue for entry in entries: @@ -391,22 +395,25 @@ def _sensor_cycle(self): start = time.time() for addr in self._sensors: if not self.alive: - self.logger.debug(f"Self not alive (sensor={addr})") + self.logger.debug(f"'self' not alive (sensor={addr})") break for key in self._sensors[addr]: path = self._sensors[addr][key]['path'] items = self.get_items_for_mapping(addr+'-'+key) if path is None: if debugLog: - self.logger.debug(f"_sensor_cycle: path not found for {item.id()}") + self.logger.debug(f"_sensor_cycle: no item path found for mapping '{addr}-{key}'") continue try: value = self.owbase.read('/uncached' + path).decode() + self.stopevent.wait(self._parasitic_power_wait) value = float(value) if key.startswith('T') and value == 85: self.logger.error(f"reading {addr} gives error value 85.") continue except Exception as e: + # time.sleep(self._parasitic_power_wait) + #self.stopevent.wait(self._parasitic_power_wait) self._sensors[addr][key]['readerrors'] = self._sensors[addr][key].get('readerrors', 0) + 1 if self._sensors[addr][key]['readerrors'] % self.warn_after == 0: self.logger.warning(f"_sensor_cycle: {self._sensors[addr][key]['readerrors']}. 
problem reading {addr}-{key}, error: {e}") diff --git a/onewire/locale.yaml b/onewire/locale.yaml index 411b03160..17645c346 100755 --- a/onewire/locale.yaml +++ b/onewire/locale.yaml @@ -20,7 +20,7 @@ plugin_translations: 'Gerät(e)' : { 'de': '=', 'en': 'Device(s)' } # Alternative format for translations of longer texts: - 'Hier kommt der Inhalt des Webinterfaces hin.': + 'Wartezeit für parasitäre Spannung': de: '=' - en: 'Here goes the content of the web interface.' + en: 'Parasitic power wait time' diff --git a/onewire/plugin.yaml b/onewire/plugin.yaml index fa66bb55b..a74228138 100755 --- a/onewire/plugin.yaml +++ b/onewire/plugin.yaml @@ -11,7 +11,7 @@ plugin: keywords: 1wire onewire dallas ibutton sensor temperature humidity documentation: '' support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1493319-support-thread-zum-onewire-plugin - version: 1.9.2 # Plugin version + version: 1.9.4 # Plugin version sh_minversion: 1.9.3.5 # minimum shNG version to use this plugin multi_instance: True restartable: True @@ -113,6 +113,15 @@ parameters: en: 'Time period between two requests of 1-wire I/O chip.' fr: 'Délai entre deux demandes de puce 1-wire I/O.' 
+ parasitic_power_wait: + type: num + default: 0.5 + valid_min: 0.1 + valid_max: 1.5 + description: + de: 'Wartezeit in Sekunden, um pei parasitärer Spannungsversorgung der Sensoren die Busspannung zu regenerieren' + en: 'Waiting time in seconds to regenerate the bus voltage, if sensors are operated using parasitic power' + log_counter_io_loop_time: type: num default: 10 diff --git a/onewire/webif/templates/index.html b/onewire/webif/templates/index.html index 78031eadb..4e84fcb60 100755 --- a/onewire/webif/templates/index.html +++ b/onewire/webif/templates/index.html @@ -251,7 +251,7 @@ {{ p.host }} {{ _('IO Wartezeit') }} - {{ p._io_wait }} + {{ p._io_wait }} {{ _('Sek.') }} @@ -259,19 +259,15 @@ {{ p.port }} {{ _('iButton Wartezeit') }} - {{ p._button_wait }} + {{ p._button_wait }} {{ _('Sek.') }} {{ _('Cycle') }} {{ p._cycle }} {{ _('Sek.') }} - - - + {{ _('Wartezeit für parasitäre Spannung') }} + {{ p._parasitic_power_wait }} {{ _('Sek.') }} diff --git a/piratewthr/__init__.py b/piratewthr/__init__.py index 234eeb443..d8e860fdb 100755 --- a/piratewthr/__init__.py +++ b/piratewthr/__init__.py @@ -37,7 +37,7 @@ class PirateWeather(SmartPlugin): - PLUGIN_VERSION = "1.1.0" + PLUGIN_VERSION = "1.1.1" # https://api.pirateweather.net/forecast/[apikey]/[latitude],[longitude] _base_url = 'https://api.pirateweather.net/forecast/' @@ -230,7 +230,7 @@ def get_forecast(self): hour.update({'date': date_entry, 'weekday': day_entry, 'hour': hour_entry, 'icon_visu': self.map_icon(hour['icon'])}) if json_obj['daily'].get(date_key) is None: json_obj['daily'].update({date_key: {}}) - elif json_obj['daily'][date_key].get('hours') is None: + if json_obj['daily'][date_key].get('hours') is None: json_obj['daily'][date_key].update({'hours': {}}) json_obj['daily'][date_key]['hours'].update(OrderedDict({hour_entry: hour})) json_obj['hourly'].update(OrderedDict({'hour{}'.format(number): hour})) diff --git a/piratewthr/plugin.yaml b/piratewthr/plugin.yaml index 87dd91539..79507d926 100755 
--- a/piratewthr/plugin.yaml +++ b/piratewthr/plugin.yaml @@ -11,7 +11,7 @@ plugin: keywords: weather sun wind rain precipitation #documentation: '' support: 'https://knx-user-forum.de/forum/supportforen/smarthome-py/1852685' - version: 1.1.0 # Plugin version + version: 1.1.1 # Plugin version sh_minversion: 1.9.3.4 # minimum shNG version to use this plugin #sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) multi_instance: True # plugin supports multi instance diff --git a/raumfeld_ng/README.md b/raumfeld_ng/README.md index d387e03ef..9321a4135 100755 --- a/raumfeld_ng/README.md +++ b/raumfeld_ng/README.md @@ -1,4 +1,4 @@ -# Sample Plugin <- put the name of your plugin here +# raumfeld_ng Plugin #### Version 1.x.y @@ -26,8 +26,7 @@ The plugin needs a running node-raumserver instance on the same or another host. ```yaml raumfeld_ng: - class_name: raumfeld_ng - class_path: plugins.raumfeld_ng + plugin_name: raumfeld_ng rf_HostIP: '127.0.0.1' rf_HostPort: '8080' ``` diff --git a/smartvisu/__init__.py b/smartvisu/__init__.py index 4173cf0bb..c9c9582ba 100755 --- a/smartvisu/__init__.py +++ b/smartvisu/__init__.py @@ -45,7 +45,7 @@ ######################################################################### class SmartVisu(SmartPlugin): - PLUGIN_VERSION="1.8.9" + PLUGIN_VERSION="1.8.10" ALLOW_MULTIINSTANCE = True visu_definition = None @@ -76,6 +76,8 @@ def __init__(self, sh): self._handle_widgets = self.get_parameter_value('handle_widgets') self._create_masteritem_file = self.get_parameter_value('create_masteritem_file') self.list_deprecated_warnings = self.get_parameter_value('list_deprecated_warnings') + self.protocol_over_reverseproxy = False + #self.protocol_over_reverseproxy = self.get_parameter_value('protocol_over_reverseproxy') self.smartvisu_version = self.get_smartvisu_version() if self.smartvisu_version == '': @@ -104,7 +106,7 @@ def __init__(self, sh): if self.mod_websocket is not None: self.payload_smartvisu = 
self.mod_websocket.get_payload_protocol_by_id('sv') try: - self.payload_smartvisu.set_smartvisu_support(protocol_enabled=True, default_acl=self.default_acl, query_definitions=False, series_updatecycle=0) + self.payload_smartvisu.set_smartvisu_support(protocol_enabled=True, default_acl=self.default_acl, query_definitions=False, series_updatecycle=0, protocol_over_reverseproxy=self.protocol_over_reverseproxy) except: self.logger.exception("Payload protocol 'smartvisu' of module 'websocket' could not be found.") diff --git a/smartvisu/plugin.yaml b/smartvisu/plugin.yaml index 9be222ada..c233ea076 100755 --- a/smartvisu/plugin.yaml +++ b/smartvisu/plugin.yaml @@ -8,11 +8,11 @@ plugin: maintainer: msinn tester: wvhn state: ready -# keywords: iot xyz - documentation: http://smarthomeng.de/user/plugins/visu_smartvisu/user_doc.html + #keywords: iot xyz + #documentation: '' support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1586800-support-thread-für-das-smartvisu-plugin - version: 1.8.9 # Plugin version + version: 1.8.10 # Plugin version sh_minversion: 1.9.3.5 # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) py_minversion: 3.6 # minimum Python version to use for this plugin @@ -89,6 +89,13 @@ parameters: de: 'Sollen Deprecated- bzw. Removed Warnungen und Fehler einzeln geloggt werden?' en: 'Should individual Deprecated/Removed warnings and errors be logged?' 
+# protocol_over_reverseproxy: +# type: bool +# default: False +# description: +# de: 'Clients greifen über einen Reverse Proxy auf SmartHomeNG zu' +# en: 'Clients access SmartHomeNG via a reverse proxy' + item_attributes: # Definition of item attributes defined by this plugin visu_acl: diff --git a/smartvisu/webif/templates/index.html b/smartvisu/webif/templates/index.html index 38e6a6be3..4fb8c9413 100755 --- a/smartvisu/webif/templates/index.html +++ b/smartvisu/webif/templates/index.html @@ -5,7 +5,7 @@ {% set update_interval = 10000 %} {% set reload_button = false %} - +{% set initial_update = true %} {% block pluginstyles %} {% endblock pluginstyles %} {% set tab1title = "" ~ p.get_shortname() ~ " " ~ _('Clients') ~ " (" ~ clients|length ~ ")" %} {% block bodytab1 %} - - - -
-
- {% endblock %} @@ -258,70 +286,35 @@ --> {% set tab2title = "" ~ p.get_shortname() ~ " " ~ _('Items') ~ " (" ~ items|length ~ ")" %} {% block bodytab2 %} -
-
- - - - - - - - - + +
{{ _('Item') }}{{ _('Typ') }}{{ _('Wert') }}{{ _('Visu Zugriff') }}
+ {% for item in items %} - + {% endfor %} +
{{ item._path }} {{ item._type }} {{ item() }} {{ item.conf['visu_acl'] }}
-
-
-{% endblock bodytab2 %} - - {% set tab3title = "" ~ p.get_shortname() ~ " " ~ _('Logiken') ~ " (" ~ logics|length ~ ")" %} {% block bodytab3 %} -
-
- - - - - - - - - +
{{ _('Logik') }}{{ _('Status') }}{{ _('Visu Zugriff') }}
+ {% for logic in logics %} - + - {% endfor %} +
{{ logic.name }} {% if logic.enabled %}{{ _('aktiv') }}{% else %}{{ _('nicht aktiv') }}{% endif %} {% if logic.visu_access %}{{ _('erlaubt') }}{% else %}{{ _('nicht erlaubt') }}{% endif %}
{% endblock bodytab3 %} - - - -{% set tab4title = "" ~ p.get_shortname() ~ " " ~ _('Clients') ~ " (" ~ clients|length ~ ")" %} - -{% block bodytab4 %} - -{% endblock bodytab4 %} diff --git a/snmp/requirements.txt b/snmp/requirements.txt index ce710bede..ea8985ff0 100755 --- a/snmp/requirements.txt +++ b/snmp/requirements.txt @@ -1 +1 @@ -puresnmp >=1.7.2 \ No newline at end of file +puresnmp >=1.7.2,<2.0.0 diff --git a/sonos/__init__.py b/sonos/__init__.py index 3b36f3a8e..deb39814b 100755 --- a/sonos/__init__.py +++ b/sonos/__init__.py @@ -194,27 +194,73 @@ def __init__(self, endpoint, service, logger, threadName): self._service = service self._endpoint = endpoint self._event = None - self._signal = None + self._signal = threading.Event() self.logger = logger self._threadName = threadName def subscribe(self): - self.logger.debug(f"start subscribe for endpoint {self._endpoint}") + self.logger.dbglow(f"start subscribe for endpoint {self._endpoint}") + if 'eventAvTransport' in self._threadName: + self.logger.dbghigh(f"subscribe(): endpoint av envent detected. Enabling debugging logs") + debug = 1 + else: + debug = 0 + + if debug: + self.logger.dbghigh(f"subscribe(): start for endpoint {self._endpoint}") + with self._lock: - self._signal = threading.Event() + if debug: + self.logger.dbghigh(f"subscribe(): clearing signal Event for endpoint {self._endpoint}") + self._signal.clear() + + # Check if signal was cleared correctly: + if self._signal.is_set(): + self.logger.error(f"subscribe(): Event could not be cleared correctly for service {self._service}") + else: + self.logger.dbghigh(f"subscribe(): Event cleared successfully. Thread can be started for service {self._service}") + try: - # self._event = self._service.subscribe(auto_renew=True) self._event = self._service.subscribe(auto_renew=False) + # No benefits of automatic renew could be observed. 
+ # self._event = self._service.subscribe(auto_renew=True) + except Exception as e: - self.logger.warning(f"Exception in subscribe(): {e}") + self.logger.error(f"Exception in subscribe(): {e}") if self._event: - self._event.auto_renew_fail = renew_error_callback - self._thread = threading.Thread(target=self._endpoint, name=self._threadName, args=(self,)) - self._thread.setDaemon(True) - self._thread.start() + if debug: + self.logger.dbghigh(f"subscribe(): event valid, starting new thread for endpoint {self._endpoint}") + + try: + self._event.auto_renew_fail = renew_error_callback + self._thread = threading.Thread(target=self._endpoint, name=self._threadName, args=(self,)) + self._thread.setDaemon(True) + self._thread.start() + self.logger.debug(f"start subscribe finished successfully") + if not self._thread.is_alive(): + self.logger.error("Critical error in subscribe method: Thread could not be startet and is not alive.") + else: + if debug: + self.logger.dbghigh(f"Debug subscribe: Thread startet successfully for service {self._service}") + + except Exception as e: + self.logger.error(f"Exception in subscribe() at point b: {e}") + + else: + self.logger.error(f"subscribe(): Error in subscribe for endpoint {self._endpoint}: self._event not valid") + if debug: + self.logger.dbghigh(f"subscribe() {self._endpoint}: lock released. Self._service is {self._service}") def unsubscribe(self): - self.logger.debug(f"start unsubscribe for endpoint {self._endpoint}") + self.logger.dbglow(f"unsubscribe(): start for endpoint {self._endpoint}") + if 'eventAvTransport' in self._threadName: + self.logger.dbghigh(f"unsubscribe: endpoint av envent detected. 
Enabling debugging logs") + debug = 1 + else: + debug = 0 + if debug: + self.logger.dbghigh(f"unsubscribe(): start for endpoint {self._endpoint}") + with self._lock: if self._event: # try to unsubscribe first @@ -224,13 +270,42 @@ def unsubscribe(self): self.logger.warning(f"Exception in unsubscribe(): {e}") self._signal.set() if self._thread: - self.logger.debug("Preparing to terminate thread") + self.logger.dbglow("Preparing to terminate thread") + if debug: + self.logger.dbghigh(f"unsubscribe(): Preparing to terminate thread for endpoint {self._endpoint}") self._thread.join(2) + if debug: + self.logger.dbghigh(f"unsubscribe(): Thread joined for endpoint {self._endpoint}") + if not self._thread.is_alive(): - self.logger.debug("Thread killed") + self.logger.dbglow("Thread killed for enpoint {self._endpoint}") + if debug: + self.logger.dbghigh(f"Thread killed for endpoint {self._endpoint}") + else: - self.logger.warning("Thread is still alive") + self.logger.error("unsubscibe(): Error, thread is still alive") + self._thread = None self.logger.info(f"Event {self._endpoint} unsubscribed and thread terminated") + if debug: + self.logger.dbghigh(f"unsubscribe(): Event {self._endpoint} unsubscribed and thread terminated") + else: + if debug: + self.logger.warning(f"unsubscribe(): {self._endpoint}: self._event not valid") + if debug: + self.logger.dbghigh(f"unsubscribe(): {self._endpoint}: lock released") + + + @property + def eventSignalIsSet(self): + if self._signal: + return self._signal.is_set() + return False + + @property + def subscriptionThreadIsActive(self): + if self._thread: + return self._thread.is_alive() + return False @property def signal(self): @@ -415,6 +490,7 @@ def dispose(self): def subscribe_base_events(self): if not self._soco: + self.logger.error("Error in subscribe_base_events: self._soco not valid.") return self.logger.debug("Start subscribe base event fct") self.zone_subscription.unsubscribe() @@ -432,6 +508,11 @@ def 
subscribe_base_events(self): self.render_subscription.unsubscribe() self.render_subscription.subscribe() + # Important note: + # av event is not subscribed here because it has special handling in function zone group event. + pass + + def refresh_static_properties(self) -> None: """ This function is called by the plugins discover function. This is typically called every 180sec. @@ -500,7 +581,7 @@ def _rendering_control_event(self, sub_handler: SubscriptionHandler) -> None: self.night_mode = event.variables['night_mode'] if 'dialog_mode' in event.variables: self.dialog_mode = event.variables['dialog_mode'] - self.logger.debug(f"{self.uid}: event variables: {event.variables}") + self.logger.debug(f"rendering_control_event: {self.uid}: event variables: {event.variables}") sub_handler.event.events.task_done() del event except Empty: @@ -618,146 +699,161 @@ def _av_transport_event(self, sub_handler: SubscriptionHandler) -> None: AV event handling :param sub_handler: SubscriptionHandler for the av transport event """ - try: - self.logger.debug(f"_av_transport_event: {self.uid}: av transport event handler active.") - while not sub_handler.signal.wait(1): - try: - event = sub_handler.event.events.get(timeout=0.5) - self._av_transport_event = event + if sub_handler is None: + self.logger.error(f"_av_transport_event: SubscriptionHandler is None.") - # set streaming type - if self.soco.is_playing_line_in: + self.logger.dbghigh(f"_av_transport_event: {self.uid}: av transport event handler active.") + while not sub_handler.signal.wait(1): + self.logger.dbgmed(f"_av_transport_event: {self.uid}: start try") + + try: + event = sub_handler.event.events.get(timeout=0.5) + except Empty: + #self.logger.dbglow(f"av_transport_event: got empty exception, which is normal") + pass + except Exception as e: + self.logger.error(f"_av_tranport_event: Exception during events.get(): {e}") + else: + + self.logger.dbghigh(f"_av_transport_event: {self.uid}: received event") + + # set streaming type 
+ try: + is_playing_line_in = self.soco.is_playing_line_in + is_playing_tv = self.soco.is_playing_tv + is_playing_radio = self.soco.is_playing_radio + except Exception as e: + self.logger.error(f"_av_tranport_event: Exception during soco.get functions: {e}") + else: + if is_playing_line_in: self.streamtype = "line_in" - elif self.soco.is_playing_tv: + elif is_playing_tv: self.streamtype = "tv" - elif self.soco.is_playing_radio: + elif is_playing_radio: self.streamtype = "radio" else: self.streamtype = "music" - if 'transport_state' in event.variables: - transport_state = event.variables['transport_state'] - if transport_state: - self.handle_transport_state(transport_state) - if 'current_crossfade_mode' in event.variables: - self.cross_fade = bool(event.variables['current_crossfade_mode']) - if 'sleep_timer_generation' in event.variables: - if int(event.variables['sleep_timer_generation']) > 0: - self.snooze = self.get_snooze() + if 'transport_state' in event.variables: + transport_state = event.variables['transport_state'] + if transport_state: + self.handle_transport_state(transport_state) + if 'current_crossfade_mode' in event.variables: + self.cross_fade = bool(event.variables['current_crossfade_mode']) + if 'sleep_timer_generation' in event.variables: + if int(event.variables['sleep_timer_generation']) > 0: + self.snooze = self.get_snooze() + else: + self.snooze = 0 + if 'current_play_mode' in event.variables: + self.play_mode = event.variables['current_play_mode'] + if 'current_track_uri' in event.variables: + track_uri = event.variables['current_track_uri'] + if re.match(r'^x-rincon:RINCON_', track_uri) is not None: + # slave call, set uri to the coordinator track uri + if self._check_property(): + self.track_uri = sonos_speaker[self.coordinator].track_uri else: - self.snooze = 0 - if 'current_play_mode' in event.variables: - self.play_mode = event.variables['current_play_mode'] - if 'current_track_uri' in event.variables: - track_uri = 
event.variables['current_track_uri'] - if re.match(r'^x-rincon:RINCON_', track_uri) is not None: - # slave call, set uri to the coordinator track uri - if self._check_property(): - self.track_uri = sonos_speaker[self.coordinator].track_uri - else: - self.track_uri = '' + self.track_uri = '' + else: + self.track_uri = track_uri + # empty track is a trigger to reset some other props + if not self.track_uri: + self.track_artist = '' + self.track_album = '' + self.track_album_art = '' + self.track_title = '' + self.radio_show = '' + self.radio_station = '' + if 'current_track' in event.variables: + self.current_track = event.variables['current_track'] + else: + self.current_track = 0 + if 'number_of_tracks' in event.variables: + self.number_of_tracks = event.variables['number_of_tracks'] + else: + self.number_of_tracks = 0 + if 'current_track_duration' in event.variables: + self.current_track_duration = event.variables['current_track_duration'] + else: + self.current_track_duration = '' + + # don't do an else here: these value won't always be updated + if 'current_transport_actions' in event.variables: + self.current_transport_actions = event.variables['current_transport_actions'] + if 'current_valid_play_modes' in event.variables: + self.current_valid_play_modes = event.variables['current_valid_play_modes'] + if 'current_track_meta_data' in event.variables: + if event.variables['current_track_meta_data']: + # we have some different data structures, handle it + if isinstance(event.variables['current_track_meta_data'], DidlMusicTrack): + metadata = event.variables['current_track_meta_data'].__dict__ + elif isinstance(event.variables['current_track_meta_data'], DidlItem): + metadata = event.variables['current_track_meta_data'].__dict__ + else: + metadata = event.variables['current_track_meta_data'].metadata + if 'creator' in metadata: + self.track_artist = metadata['creator'] else: - self.track_uri = track_uri - # empty track is a trigger to reset some other props - if 
not self.track_uri: self.track_artist = '' - self.track_album = '' - self.track_album_art = '' + if 'title' in metadata: + # ignore x-sonos-api-stream: radio played, title seems wrong + if re.match(r"^x-sonosapi-stream:", metadata['title']) is None: + self.track_title = metadata['title'] + else: self.track_title = '' - self.radio_show = '' - self.radio_station = '' - - if 'current_track' in event.variables: - self.current_track = event.variables['current_track'] - else: - self.current_track = 0 - if 'number_of_tracks' in event.variables: - self.number_of_tracks = event.variables['number_of_tracks'] - else: - self.number_of_tracks = 0 - if 'current_track_duration' in event.variables: - self.current_track_duration = event.variables['current_track_duration'] - else: - self.current_track_duration = '' - - # don't do an else here: these value won't always be updated - if 'current_transport_actions' in event.variables: - self.current_transport_actions = event.variables['current_transport_actions'] - if 'current_valid_play_modes' in event.variables: - self.current_valid_play_modes = event.variables['current_valid_play_modes'] - - if 'current_track_meta_data' in event.variables: - if event.variables['current_track_meta_data']: - # we have some different data structures, handle it - if isinstance(event.variables['current_track_meta_data'], DidlMusicTrack): - metadata = event.variables['current_track_meta_data'].__dict__ - elif isinstance(event.variables['current_track_meta_data'], DidlItem): - metadata = event.variables['current_track_meta_data'].__dict__ - else: - metadata = event.variables['current_track_meta_data'].metadata - if 'creator' in metadata: - self.track_artist = metadata['creator'] - else: - self.track_artist = '' - if 'title' in metadata: - # ignore x-sonos-api-stream: radio played, title seems wrong - if re.match(r"^x-sonosapi-stream:", metadata['title']) is None: - self.track_title = metadata['title'] - else: - self.track_title = '' - if 'album' in 
metadata: - self.track_album = metadata['album'] - else: - self.track_album = '' - if 'album_art_uri' in metadata: - cover_url = metadata['album_art_uri'] - if not cover_url.startswith(('http:', 'https:')): - self.track_album_art = 'http://' + self.soco.ip_address + ':1400' + cover_url - else: - self.track_album_art = cover_url + if 'album' in metadata: + self.track_album = metadata['album'] + else: + self.track_album = '' + if 'album_art_uri' in metadata: + cover_url = metadata['album_art_uri'] + if not cover_url.startswith(('http:', 'https:')): + self.track_album_art = 'http://' + self.soco.ip_address + ':1400' + cover_url else: - self.track_album_art = '' + self.track_album_art = cover_url + else: + self.track_album_art = '' - if 'stream_content' in metadata: - stream_content = metadata['stream_content'].title() - if not stream_content.lower() in \ - ['zpstr_buffering', 'zpstr_connecting', 'x-sonosapi-stream']: - self.stream_content = stream_content - else: - self.stream_content = "" + if 'stream_content' in metadata: + stream_content = metadata['stream_content'].title() + if not stream_content.lower() in \ + ['zpstr_buffering', 'zpstr_connecting', 'x-sonosapi-stream']: + self.stream_content = stream_content else: - self.stream_content = '' - if 'radio_show' in metadata: - radio_show = metadata['radio_show'] - if radio_show: - radio_show = radio_show.split(',p', 1) - if len(radio_show) > 1: - self.radio_show = radio_show[0] - else: - self.radio_show = '' + self.stream_content = "" + else: + self.stream_content = '' + if 'radio_show' in metadata: + radio_show = metadata['radio_show'] + if radio_show: + radio_show = radio_show.split(',p', 1) + if len(radio_show) > 1: + self.radio_show = radio_show[0] else: self.radio_show = '' + else: + self.radio_show = '' - if self.streamtype == 'radio': - # we need the title from 'enqueued_transport_uri_meta_data' - if 'enqueued_transport_uri_meta_data' in event.variables: - radio_metadata = 
event.variables['enqueued_transport_uri_meta_data'] - if isinstance(radio_metadata, str): - radio_station = radio_metadata[radio_metadata.find('') + 10:radio_metadata.find('')] - elif hasattr(radio_metadata, 'title'): - radio_station = str(radio_metadata.title) - else: - radio_station = "" - self.radio_station = radio_station - else: - self.radio_station = '' + if self.streamtype == 'radio': + # we need the title from 'enqueued_transport_uri_meta_data' + if 'enqueued_transport_uri_meta_data' in event.variables: + radio_metadata = event.variables['enqueued_transport_uri_meta_data'] + if isinstance(radio_metadata, str): + radio_station = radio_metadata[radio_metadata.find('') + 10:radio_metadata.find('')] + elif hasattr(radio_metadata, 'title'): + radio_station = str(radio_metadata.title) + else: + radio_station = "" + self.radio_station = radio_station + else: + self.radio_station = '' + + sub_handler.event.events.task_done() + del event + self.logger.dbghigh(f"av_transport_event() for {self.uid}: task_done()") - sub_handler.event.events.task_done() - del event - except Empty: - pass - except Exception as ex: - self.logger.error(f"_av_transport_event: Error {ex} occurred.") + self.logger.dbghigh(f"av_transport_event(): {self.uid}: while loop terminated.") def _check_property(self): if not self.is_initialized: @@ -1320,7 +1416,7 @@ def zone_group_members(self, value: list) -> None: :param value: list with uids to set as group members """ if not isinstance(value, list): - self.logger.warning(f"zone_group_members: {self.uid}: value={value} for setter zone_group_members must be type of list.") + self.logger.error(f"zone_group_members: {self.uid}: value={value} for setter zone_group_members must be type of list.") return self._members = value @@ -1332,19 +1428,31 @@ def zone_group_members(self, value: list) -> None: if self.is_coordinator: for member in self._zone_group_members: - self.logger.debug(f"****zone_group_members: {member=}") + 
self.logger.dbglow(f"****zone_group_members: {member=}") if member is not self: try: - self.logger.debug(f"Unsubscribe av event for uid '{self.uid}' in fct zone_group_members") + self.logger.dbghigh(f"zone_group_members(): Unsubscribe av event for uid '{self.uid}' in fct zone_group_members") member.av_subscription.unsubscribe() except Exception as e: - self.logger.info(f"Unsubscribe av event for uid '{self.uid}' in fct zone_group_members caused error {e}") + self.logger.warning(f"Unsubscribe av event for uid '{self.uid}' in fct zone_group_members caused error {e}") pass else: - # Why are the member speakers un- and subscribed again? - self.logger.debug(f"Un/Subscribe av event for uid '{self.uid}' in fct zone_group_members") - member.av_subscription.unsubscribe() - member.av_subscription.subscribe() + # Register AV event for coordinator speakers: + #self.logger.dbglow(f"Un/Subscribe av event for uid '{self.uid}' in fct zone_group_members") + + active = member.av_subscription.subscriptionThreadIsActive + is_subscribed = member.av_subscription.is_subscribed + self.logger.dbghigh(f"zone_group_members(): Subscribe av event for uid '{self.uid}': Status before measure: AV Thread is {active}, subscription is {is_subscribed}, Eventflag: {member.av_subscription.eventSignalIsSet}") + + if active == False: + self.logger.dbghigh(f"zone_group_members: Subscribe av event for uid '{self.uid}' because thread is not active") + #member.av_subscription.unsubscribe() + # + # Workaround: + # member.av_subscription.update_endpoint(endpoint=self._av_transport_event) + member.av_subscription.subscribe() + self.logger.dbghigh(f"zone_group_members: Subscribe av event for uid '{self.uid}': Status after measure: AV thread is {member.av_subscription.subscriptionThreadIsActive}, subscription {member.av_subscription.is_subscribed}, Eventflag: {member.av_subscription.eventSignalIsSet}") + @property def streamtype(self) -> str: @@ -2189,6 +2297,7 @@ def play_sonos_radio(self, station_name: str, 
start: bool = True) -> None: self.logger.warning(msg) return False return True + def _play_radio(self, station_name: str, music_service: str = 'TuneIn', start: bool = True) -> tuple: """ @@ -2283,6 +2392,7 @@ def _play_radio(self, station_name: str, music_service: str = 'TuneIn', start: b self.soco.play_uri(uri=uri, meta=metadata, title=the_station.title, start=start, force_radio=True) return True, "" + def play_sharelink(self, url: str, start: bool = True) -> None: """ Plays a sharelink from a given url @@ -2769,7 +2879,7 @@ class Sonos(SmartPlugin): """ Main class of the Plugin. Does all plugin specific stuff """ - PLUGIN_VERSION = "1.8.1" + PLUGIN_VERSION = "1.8.2" def __init__(self, sh): """Initializes the plugin.""" @@ -2813,7 +2923,7 @@ def __init__(self, sh): # init TTS if self._tts: if self._init_tts(webservice_ip, webservice_port, local_webservice_path, local_webservice_path_snippet): - self.logger.info(f"TTS successful enabled") + self.logger.info(f"TTS successfully enabled") else: self.logger.info(f"TTS initialisation failed.") @@ -3118,16 +3228,27 @@ def _parse_speaker_ips(self, speaker_ips: list) -> list: # return unique items in list return utils.unique_list(self._speaker_ips) + + def debug_speaker(self, uid): + self.logger.warning(f"debug_speaker: Starting function for uid {uid}") + #sonos_speaker[uid].set_stop() + self.logger.warning(f"debug_speaker: check sonos_speaker[uid].av.subscription: {sonos_speaker[uid].av_subscription}") + # Event objekt is not callable: + #sonos_speaker[uid]._av_transport_event(sonos_speaker[uid].av_subscription) + self.logger.warning(f"debug_speaker: av_subscription: thread active {sonos_speaker[uid].av_subscription.subscriptionThreadIsActive}, eventSignal: {sonos_speaker[uid].av_subscription.eventSignalIsSet}") + + def get_soco_version(self) -> str: """ Get version of used Soco and return it """ - + try: src = io.open('plugins/sonos/soco/__init__.py', encoding='utf-8').read() metadata = dict(re.findall("__([a-z]+)__ = 
\"([^\"]+)\"", src)) - except Exception: - self.logger.warning(f"Version of used Soco module not available") + except Exception as e: + self.logger.warning(f"Version of used Soco module not available. Exception: {e}") + self.logger.warning(f"DEBUG get socoversion: Current dir: {os.getcwd()}") return '' else: soco_version = metadata['version'] @@ -3452,7 +3573,7 @@ def _discover(self, force: bool = False) -> None: uid = uid.lower() if self._is_speaker_up(uid, zone.ip_address): - self.logger.debug(f"Speaker found: {zone.ip_address}, {uid}") + self.logger.dbglow(f"Speaker found: {zone.ip_address}, {uid}") online_speaker_count = online_speaker_count + 1 if uid in sonos_speaker: try: @@ -3461,11 +3582,11 @@ def _discover(self, force: bool = False) -> None: self.logger.warning(f"Exception in discover -> sonos_speaker[uid].soco: {e}") else: if zone is not zone_compare: - self.logger.debug(f"zone is not in speaker list, yet. Adding and subscribing zone {zone}.") + self.logger.dbghigh(f"zone is not in speaker list, yet. Adding and subscribing zone {zone}.") sonos_speaker[uid].soco = zone sonos_speaker[uid].subscribe_base_events() else: - self.logger.debug(f"SoCo instance {zone} already initiated, skipping.") + self.logger.dbglow(f"SoCo instance {zone} already initiated, skipping.") # The following check subscriptions functions triggers an unsubscribe/subscribe. However, this causes # a massive memory leak increasing with every check_subscription call. # self.logger.debug("checking subscriptions") @@ -3481,15 +3602,15 @@ def _discover(self, force: bool = False) -> None: else: # Speaker is not online. Disposing... 
if sonos_speaker[uid].soco is not None: - self.logger.debug(f"Disposing offline speaker: {zone.ip_address}, {uid}") + self.logger.dbghigh(f"Disposing offline speaker: {zone.ip_address}, {uid}") sonos_speaker[uid].dispose() else: - self.logger.debug(f"Ignoring offline speaker: {zone.ip_address}, {uid}") + self.logger.info(f"Ignoring offline speaker: {zone.ip_address}, {uid}") sonos_speaker[uid].is_initialized = False if uid in sonos_speaker: - self.logger.debug(f"setting {zone.ip_address}, uid {uid} to handled speaker") + self.logger.dbglow(f"setting {zone.ip_address}, uid {uid} to handled speaker") handled_speaker[uid] = sonos_speaker[uid] else: self.logger.debug(f"ip {zone.ip_address}, uid {uid} is not in sonos_speaker") @@ -3497,7 +3618,7 @@ def _discover(self, force: bool = False) -> None: # dispose every speaker that was not found for uid in set(sonos_speaker.keys()) - set(handled_speaker.keys()): if sonos_speaker[uid].soco is not None: - self.logger.debug(f"Removing undiscovered speaker: {sonos_speaker[uid].ip_address}, {uid}") + self.logger.warning(f"Removing/disposing undiscovered speaker: {sonos_speaker[uid].ip_address}, {uid}") sonos_speaker[uid].dispose() # Extract number of online speakers: diff --git a/sonos/plugin.yaml b/sonos/plugin.yaml index 3845bbb4e..3f60f0aa7 100755 --- a/sonos/plugin.yaml +++ b/sonos/plugin.yaml @@ -12,7 +12,7 @@ plugin: documentation: https://github.com/smarthomeNG/plugins/blob/master/sonos/README.md support: https://knx-user-forum.de/forum/supportforen/smarthome-py/25151-sonos-anbindung - version: 1.8.1 # Plugin version + version: 1.8.2 # Plugin version sh_minversion: 1.5.1 # minimum shNG version to use this plugin py_minversion: 3.8 # minimum Python version to use for this plugin multi_instance: False # plugin supports multi instance @@ -85,6 +85,27 @@ parameters: de: "(optional) Verlängert die Dauer von Snippet Audio Dateien um einen festen Offset in Sekunden." 
en: "(optional) Extend snippet duration by a fixed offset specified in seconds" + webif_pagelength: + type: int + default: 0 + valid_list: + - -1 + - 0 + - 25 + - 50 + - 100 + description: + de: 'Anzahl an Items, die standardmäßig in einer Web Interface Tabelle pro Seite angezeigt werden. + 0 = automatisch, -1 = alle' + en: 'Amount of items being listed in a web interface table per page by default. + 0 = automatic, -1 = all' + description_long: + de: 'Anzahl an Items, die standardmäßig in einer Web Interface Tabelle pro Seite angezeigt werden.\n + Bei 0 wird die Tabelle automatisch an die Höhe des Browserfensters angepasst.\n + Bei -1 werden alle Tabelleneinträge auf einer Seite angezeigt.' + en: 'Amount of items being listed in a web interface table per page by default.\n + 0 adjusts the table height automatically based on the height of the browser windows.\n + -1 shows all table entries on one page.' item_attributes: sonos_uid: @@ -356,12 +377,12 @@ item_structs: type: num sonos_recv: snooze sonos_send: snooze - + status_light: type: bool sonos_recv: status_light sonos_send: status_light - + join: type: str sonos_send: join @@ -539,17 +560,17 @@ item_structs: type: str sonos_send: play_tts enforce_updates: True - + tts_language: type: str initial_value: de sonos_attrib: tts_language - + tts_volume: type: num initial_value: -1 sonos_attrib: tts_volume - + tts_fade_in: type: bool sonos_attrib: tts_fade_in @@ -558,12 +579,12 @@ item_structs: type: str sonos_send: play_snippet enforce_updates: True - + snippet_volume: type: num initial_value: 25 sonos_attrib: snippet_volume - + snippet_fade_in: type: bool initial_value: True diff --git a/sonos/soco/__init__.py b/sonos/soco/__init__.py index 4c89d7f92..573076afc 100755 --- a/sonos/soco/__init__.py +++ b/sonos/soco/__init__.py @@ -17,7 +17,7 @@ __author__ = "The SoCo-Team " # Please increment the version number and add the suffix "-dev" after # a release, to make it possible to identify in-development code -__version__ 
= "0.29.0" +__version__ = "0.29.1" __website__ = "https://github.com/SoCo/SoCo" __license__ = "MIT License" diff --git a/sonos/soco/events.py b/sonos/soco/events.py index 16299ff98..6a48b08b6 100755 --- a/sonos/soco/events.py +++ b/sonos/soco/events.py @@ -366,6 +366,8 @@ def run(self): auto_renew_thread = AutoRenewThread( interval, self._auto_renew_thread_flag, self ) + debugName = 'SonosAutoRenewThread' + auto_renew_thread.setName(debugName) auto_renew_thread.start() def _auto_renew_cancel(self): @@ -410,7 +412,6 @@ def _request(self, method, url, headers, success, unconditional=None): # pylint: disable=inconsistent-return-statements def _wrap(self, method, strict, *args, **kwargs): - """This is a wrapper for `Subscription.subscribe`, `Subscription.renew` and `Subscription.unsubscribe` which: @@ -437,7 +438,6 @@ def _wrap(self, method, strict, *args, **kwargs): # A lock is used, because autorenewal occurs in # a thread with self._lock: - try: method(*args, **kwargs) diff --git a/sonos/soco/events_asyncio.py b/sonos/soco/events_asyncio.py index 73cf8c50c..a991a1b90 100755 --- a/sonos/soco/events_asyncio.py +++ b/sonos/soco/events_asyncio.py @@ -281,6 +281,7 @@ async def _async_start(self): async def async_stop(self): """Stop the listener.""" + self.is_running = False if self.site: await self.site.stop() self.site = None diff --git a/sonos/soco/events_twisted.py b/sonos/soco/events_twisted.py index 5271383e3..d5a5749ba 100755 --- a/sonos/soco/events_twisted.py +++ b/sonos/soco/events_twisted.py @@ -383,7 +383,6 @@ def on_success(response): # pylint: disable=missing-docstring return d def _wrap(self, method, strict, *args, **kwargs): - """This is a wrapper for `Subscription.subscribe`, `Subscription.renew` and `Subscription.unsubscribe` which: diff --git a/sonos/soco/utils.py b/sonos/soco/utils.py index 0ddfeffe9..309b682f3 100755 --- a/sonos/soco/utils.py +++ b/sonos/soco/utils.py @@ -154,7 +154,6 @@ def __init__( def __call__(self, deprecated_fn): 
@functools.wraps(deprecated_fn) def decorated(*args, **kwargs): - message = "Call to deprecated function {}.".format(deprecated_fn.__name__) if self.will_be_removed_in is not None: message += " Will be removed in version {}.".format( diff --git a/uzsu/requirements.txt b/uzsu/requirements.txt index bca03044c..3e2a79000 100755 --- a/uzsu/requirements.txt +++ b/uzsu/requirements.txt @@ -1,4 +1,5 @@ -numpy +numpy==1.21.4;python_version=='3.7' +numpy>=1.23.4;python_version>='3.8' scipy>=1.1.0,<=1.3.0;python_version<'3.7' scipy>=1.2.0,<=1.7.3;python_version=='3.7' #scipy>=1.5.0,<=1.8.1;python_version>'3.7' diff --git a/viessmann/__init__.py b/viessmann/__init__.py index c260bf1c1..11bd79325 100755 --- a/viessmann/__init__.py +++ b/viessmann/__init__.py @@ -1414,8 +1414,9 @@ def _parse_response(self, response, commandname='', read_response=True): self.logger.debug(f'Response decoded to: commandcode: {commandcode}, responsedatacode: {responsedatacode}, valuebytecount: {valuebytecount}, responsetypecode: {responsetypecode}') self.logger.debug(f'Rawdatabytes formatted: {self._bytes2hexstring(rawdatabytes)} and unformatted: {rawdatabytes}') - # Process response for items if read response and not error - if responsedatacode == 1 and responsetypecode != 3: + # Process response for items if response and not error + # added: only in P300 or if read_response is set, do not try if KW replies with 0x00 (OK) + if responsedatacode == 1 and responsetypecode != 3 and (self._protocol == 'P300' or read_response): # parse response if command config is available commandname = self._commandname_by_commandcode(commandcode) diff --git a/webpush/__init__.py b/webpush/__init__.py index cc0c7c111..1693cdfb2 100644 --- a/webpush/__init__.py +++ b/webpush/__init__.py @@ -79,6 +79,7 @@ def __init__(self, sh): # get the parameters for the plugin (as defined in metadata plugin.yaml): self.groupList = self.get_parameter_value('grouplist') + self.varpath = self.get_parameter_value('varpath') 
self.groupListItem = None self.publicKeyItem = None @@ -86,8 +87,7 @@ def __init__(self, sh): self.alive = False - self.actualPath = os.getcwd() - self.pluginVarPath = self.actualPath + "/var/webpush/" + self.pluginVarPath = self.varpath + "/webpush/" self.databasePath = self.pluginVarPath + "webpush_database.txt" self.keyFilePath = self.pluginVarPath + "webpush_private_key.pem" diff --git a/webpush/plugin.yaml b/webpush/plugin.yaml index 4b2569090..df6291e47 100644 --- a/webpush/plugin.yaml +++ b/webpush/plugin.yaml @@ -9,7 +9,7 @@ plugin: # tester: # Who tests this plugin? state: develop # change to ready when done with development keywords: web, push, webpush, messages - documentation: '' + documentation: '' # Path to documentation ONLY IF OTHER than user_doc.rst and generated configuration page support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1817454-support-thread version: 1.1.0 # Plugin version (must match the version specified in __init__.py) @@ -22,12 +22,18 @@ parameters: # Definition of parameters to be configured in etc/plugin.yaml (enter 'parameters: NONE', if section should be empty) grouplist: type: list - #mandatory: True # cannot be used in conjunction with 'default' default: ["alarm", "info"] description: de: 'Nachrichten-Gruppen' en: 'Message groups' + varpath: + type: str + default: "/usr/local/smarthome/var" + description: + de: 'Pfad zum SmarthomeNG var Verzeichnis (z.B. /usr/local/smarthome/var)' + en: 'Path to the SmarthomeNG var directory (ex. 
/usr/local/smarthome/var)' + item_attributes: # Definition of item attributes defined by this plugin (enter 'item_attributes: NONE', if section should be empty) webpush_communication: diff --git a/webpush/user_doc.rst b/webpush/user_doc.rst index 34e475474..43ba5c4d3 100644 --- a/webpush/user_doc.rst +++ b/webpush/user_doc.rst @@ -59,6 +59,8 @@ Bitte die Dokumentation lesen, die aus den Metadaten der plugin.yaml erzeugt wur grouplist: - alarm - info + varpath: /usr/local/smarthome/var + Items ----- diff --git a/wettercom/README.md b/wettercom/README.md index 16f7a3198..c4018e8f5 100755 --- a/wettercom/README.md +++ b/wettercom/README.md @@ -1,4 +1,4 @@ -# wetter.com +# wettercom Plugin ## Requirements @@ -11,8 +11,7 @@ wetter.com account with project, recommended: 3 days, all data transmitted ```yaml wettercom: - class_name: wettercom - class_path: plugins.wettercom + plugin_name: wettercom # apikey: # project: ``` diff --git a/wunderground/README_OLD.md b/wunderground/README_OLD.md index 747977d72..0eb0935eb 100755 --- a/wunderground/README_OLD.md +++ b/wunderground/README_OLD.md @@ -52,8 +52,7 @@ You can configure multiple instances of the wunderground plugin to collect data ```yaml # for etc/plugin.yaml configuration file: weather_somewhere: - class_name: Wunderground - class_path: plugins.wunderground + plugin_name: wunderground apikey: xxxxyyyyxxxxyyyy # language: de location: Germany/Hamburg diff --git a/xmpp/README.md b/xmpp/README.md index 8d537b3f9..fc365a0e8 100755 --- a/xmpp/README.md +++ b/xmpp/README.md @@ -52,8 +52,7 @@ loggers: ```yaml xmpp: - class_name: XMPP - class_path: plugins.xmpp + plugin_name: xmpp jid: 'user account eg skender@somexmppserver.com' password: your xmpp server password #server: 127.0.0.1:5222 diff --git a/zigbee2mqtt/__init__.py b/zigbee2mqtt/__init__.py index de305260e..eeb800dc8 100755 --- a/zigbee2mqtt/__init__.py +++ b/zigbee2mqtt/__init__.py @@ -26,7 +26,6 @@ import logging from lib.model.mqttplugin import * -from lib.item 
import Items from lib.utils import Utils from .webif import WebInterface @@ -36,7 +35,7 @@ class Zigbee2Mqtt(MqttPlugin): Main class of the Plugin. Does all plugin specific stuff and provides the update functions for the items """ - PLUGIN_VERSION = '1.1.1' + PLUGIN_VERSION = '1.1.2' def __init__(self, sh): """ diff --git a/zigbee2mqtt/plugin.yaml b/zigbee2mqtt/plugin.yaml index 6747491d6..7df947ef6 100755 --- a/zigbee2mqtt/plugin.yaml +++ b/zigbee2mqtt/plugin.yaml @@ -9,10 +9,10 @@ plugin: tester: Michael Wenzel # Who tests this plugin? state: develop # change to ready when done with development keywords: iot - documentation: - support: + documentation: '' + support: https://knx-user-forum.de/forum/supportforen/smarthome-py/1856775-support-thread-f%C3%BCr-das-zigbee2mqtt-plugin - version: 1.1.1 # Plugin version + version: 1.1.2 # Plugin version sh_minversion: 1.8.2 # minimum shNG version to use this plugin # sh_maxversion: # maximum shNG version to use this plugin (leave empty if latest) py_minversion: 3.8 # minimum Python version to use for this plugin