From 4fc23b2170696a985eb72e19405909bd013ed527 Mon Sep 17 00:00:00 2001
From: Magdalena Kasenberg
Date: Tue, 2 Jul 2024 16:12:58 +0200
Subject: [PATCH] bot: move report generation and mailing into common BotClient
---
autopts/bot/common.py | 368 ++++++++++++++++++++++++--
autopts/bot/common_features/report.py | 108 ++------
autopts/bot/mynewt.py | 186 ++-----------
autopts/bot/zephyr.py | 260 ++----------------
autopts/client.py | 8 +-
autopts/config.py | 31 ++-
autoptsserver.py | 7 +
test/unittests.py | 4 +-
tools/cron/common.py | 10 +-
9 files changed, 470 insertions(+), 512 deletions(-)
diff --git a/autopts/bot/common.py b/autopts/bot/common.py
index f9573fccbe..2965e6f732 100644
--- a/autopts/bot/common.py
+++ b/autopts/bot/common.py
@@ -12,7 +12,9 @@
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
+import collections
import copy
+import datetime
import importlib
import logging
import os
@@ -21,20 +23,19 @@
import shutil
import time
import json
+import traceback
from pathlib import Path
from argparse import Namespace
from autopts import client as autoptsclient
+from autopts.bot.common_features import github, report, mail, google_drive
+from autopts.bot.common_features.report import REPORT_TXT
from autopts.client import CliParser, Client, TestCaseRunStats, init_logging
from autopts.config import MAX_SERVER_RESTART_TIME, TEST_CASES_JSON, ALL_STATS_JSON, TC_STATS_JSON, \
- ALL_STATS_RESULTS_XML, TC_STATS_RESULTS_XML, BOT_STATE_JSON
-from autopts.ptsprojects.boards import get_free_device, get_tty, get_debugger_snr
+ ALL_STATS_RESULTS_XML, TC_STATS_RESULTS_XML, BOT_STATE_JSON, TMP_DIR, REPORT_README_MD, AUTOPTS_REPORT_FOLDER, \
+ REPORT_DIFF_TXT, REPORT_XLSX, IUT_LOGS_FOLDER, AUTOPTS_ROOT_DIR
+from autopts.ptsprojects.boards import get_free_device, get_tty, get_debugger_snr, release_device
from autopts.ptsprojects.testcase_db import DATABASE_FILE
-PROJECT_DIR = os.path.dirname( # auto-pts repo directory
- os.path.dirname( # autopts module directory
- os.path.dirname( # bot module directory
- os.path.abspath(__file__)))) # this file directory
-
log = logging.debug
@@ -52,6 +53,27 @@ def cleanup_tmp_files():
os.remove(file)
+def get_deepest_dirs(logs_tree, dst_tree, max_depth):
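+    """Move the directories found max_depth levels below logs_tree directly into
+    dst_tree, falling back to a copy if a directory is still locked by another process."""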
+ def recursive(directory, depth=3):
+ depth -= 1
+
+ for file in os.scandir(directory):
+ if file.is_dir():
+ if depth > 0:
+ recursive(file.path, depth)
+ else:
+ dst_file = os.path.join(dst_tree, file.name)
+ try:
+ shutil.move(file.path, dst_file)
+                    except BaseException:  # skip waiting for BPV to release the file
+                        try:
+                            # shutil.copy cannot copy a directory; copy the whole tree instead
+                            shutil.copytree(file.path, dst_file, dirs_exist_ok=True)
+                        except BaseException as e2:
+                            logging.exception(e2)
+
+ recursive(logs_tree, max_depth)
+
+
class BuildAndFlashException(Exception):
pass
@@ -401,22 +423,338 @@ def run_test_cases(self):
self.ptses[0].get_test_case_description(project_name, test_case_name)
all_stats.update_descriptions(descriptions)
- all_stats.pts_ver = '{}'.format(self.ptses[0].get_version())
- all_stats.platform = '{}'.format(self.ptses[0].get_system_model())
+ all_stats.pts_ver = str(self.ptses[0].get_version())
+ all_stats.platform = str(self.ptses[0].get_system_model())
+ all_stats.system_version = str(self.ptses[0].get_system_version())
except:
log('Failed to generate some stats.')
return all_stats
def start(self, args=None):
- # Extend this method in a derived class to handle sending
- # logs, reports, etc.
- self.run_tests()
+ """
+ Extend this method in a derived class, if needed, to handle
+ sending logs, reports, etc.
+ """
+
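+        # A saved bot state means the previous run was interrupted; resume it instead of starting over.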
+ if os.path.exists(BOT_STATE_JSON):
+            print(f'Continuing the previously terminated test run (remove {TMP_DIR} to start fresh)')
+
+ with open(BOT_STATE_JSON, "r") as f:
+ data = f.read()
+ bot_state = json.loads(data)
+ cfg = bot_state['config']
+ args = cfg['auto_pts']
+
+ else:
+ # Start fresh test run
+
+ bot_state = {}
+ pre_cleanup()
+
+ bot_state['start_time'] = time.time()
+
+ cfg = self.bot_config
+ args = cfg['auto_pts']
+ bot_state['config'] = cfg
+
+ if 'database_file' not in args:
+ args['database_file'] = DATABASE_FILE
+
+ if 'githubdrive' in cfg:
+ github.update_sources(cfg['githubdrive']['path'],
+ cfg['githubdrive']['remote'],
+ cfg['githubdrive']['branch'], True)
+
+ if 'git' in cfg:
+ bot_state['repos_info'] = github.update_repos(args['project_path'], cfg["git"])
+ bot_state['repo_status'] = report.make_repo_status(bot_state['repos_info'])
+ else:
+ bot_state['repos_info'] = {}
+ bot_state['repo_status'] = ''
+
+ if args.get('use_backup', False):
+                os.makedirs(TMP_DIR, exist_ok=True)
+
+ with open(BOT_STATE_JSON, "w") as f:
+ f.write(json.dumps(bot_state, indent=4))
+
+ try:
+ stats = self.run_tests()
+ finally:
+ release_device(self.args.tty_file)
+
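+        # Collect everything the report, upload and email steps need into a single dict.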
+ report_data = bot_state
+ report_data['end_time'] = time.time()
+ report_data['end_time_stamp'] = datetime.datetime.fromtimestamp(
+ report_data['end_time']).strftime("%Y_%m_%d_%H_%M_%S")
+ report_data['start_time_stamp'] = datetime.datetime.fromtimestamp(
+ bot_state['start_time']).strftime("%Y_%m_%d_%H_%M_%S")
+
+ report_data['status_count'] = stats.get_status_count()
+ report_data['tc_results'] = stats.get_results()
+ report_data['descriptions'] = stats.get_descriptions()
+ report_data['regressions'] = stats.get_regressions()
+ report_data['progresses'] = stats.get_progresses()
+ report_data['new_cases'] = stats.get_new_cases()
+ report_data['deleted_cases'] = []
+ report_data['pts_ver'] = stats.pts_ver
+ report_data['platform'] = stats.platform
+ report_data['system_version'] = stats.system_version
+ report_data['database_file'] = args['database_file']
+
+ report_data['tc_results'] = collections.OrderedDict(sorted(report_data['tc_results'].items()))
+
+ report_data['errata'] = report.get_errata(self.autopts_project_name)
+
+ report_data['pts_logs_folder'], report_data['pts_xml_folder'] = report.pull_server_logs(self.args)
+
+ report_data['report_xlsx'] = report.make_report_xlsx(report_data)
+ report_data['report_txt'] = report.make_report_txt(report_data)
+
+ if 'githubdrive' in cfg or 'gdrive' in cfg:
+ self.make_report_folder(report_data)
+
+ if 'gdrive' in cfg:
+ self.upload_logs_to_gdrive(report_data)
+
+ if 'githubdrive' in cfg:
+ self.upload_logs_to_github(report_data)
+
+ if 'mail' in cfg:
+ self.send_email(report_data)
+
+ print("Done")
def run_tests(self):
# Entry point of the simple client layer
return super().start()
+ def make_readme_md(self, report_data):
+ """Creates README.md for Github logging repo
+ """
+ readme_file = REPORT_README_MD
+
+ Path(os.path.dirname(readme_file)).mkdir(parents=True, exist_ok=True)
+
+ with open(readme_file, 'w') as f:
+ readme_body = f'''# AutoPTS report
+
+ Start time: {report_data["start_time_stamp"]}
+
+ End time: {report_data["end_time_stamp"]}
+
+ PTS version: {report_data["pts_ver"]}
+
+ Repositories:
+
+'''
+ f.write(readme_body)
+
+ for name, info in report_data['repos_info'].items():
+ f.write(f'\t{name}: {info["commit"]} [{info["desc"]}]\n')
+
+ return readme_file
+
+ def make_report_folder(self, report_data):
+        """Creates a folder containing the .txt and .xlsx reports, logs pulled
+        from the autoptsserver, IUT logs and an additional README.md.
+ """
+ cfg = report_data['config']
+
+ report_data['report_folder'] = AUTOPTS_REPORT_FOLDER
+ shutil.rmtree(report_data['report_folder'], ignore_errors=True)
+ Path(report_data['report_folder']).mkdir(parents=True, exist_ok=True)
+
+ if 'githubdrive' in cfg:
+ report_folder_name = os.path.basename(report_data['report_folder'])
+
+ report_data['old_report_txt'] = os.path.join(cfg['githubdrive']['path'],
+ cfg['githubdrive']['subdir'],
+ report_folder_name, REPORT_TXT)
+
+ report_data['report_diff_txt'], report_data['deleted_cases'] = report.make_report_diff(report_data)
+
+ report_data['readme_file'] = self.make_readme_md(report_data)
+
+ attachments = [
+ REPORT_DIFF_TXT,
+ REPORT_TXT,
+ (REPORT_TXT, f'report_{report_data["start_time_stamp"]}.txt'),
+ (REPORT_XLSX, f'report_{report_data["start_time_stamp"]}.xlsx'),
+ REPORT_README_MD,
+ report_data['database_file'],
+ report_data['pts_xml_folder'],
+ ]
+
+ iut_logs_new = os.path.join(report_data['report_folder'], 'iut_logs')
+ pts_logs_new = os.path.join(report_data['report_folder'], 'pts_logs')
+ get_deepest_dirs(IUT_LOGS_FOLDER, iut_logs_new, 3)
+ get_deepest_dirs(report_data['pts_logs_folder'], pts_logs_new, 3)
+
+ self.generate_attachments(report_data, attachments)
+
+ self.pack_report_folder(report_data, attachments)
+
+ def generate_attachments(self, report_data, attachments):
+        """Override this in a derived class to add project-specific attachments."""
+ pass
+
+ def pack_report_folder(self, report_data, attachments):
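+        """Copies or moves each attachment into the report folder. An attachment
+        given as a (src, dst_name) tuple is stored under dst_name."""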
+ report_dir = report_data['report_folder']
+
+ for item in attachments:
+ if isinstance(item, tuple):
+ src_file, dst_file = item
+ dst_file = os.path.join(report_dir, dst_file)
+ else:
+ src_file = item
+ dst_file = os.path.join(report_dir, os.path.basename(src_file))
+
+ try:
+ if not os.path.exists(src_file):
+ log(f'The file {src_file} does not exist')
+ continue
+
+ if os.path.isdir(src_file):
+ try:
+ shutil.move(src_file, dst_file)
+ continue
+                    except BaseException:  # skip waiting for BPV to release the file
+                        pass
+
+                try:
+                    shutil.copy(src_file, dst_file)
+                except BaseException:
+ pass
+
+            except BaseException:
+                traceback.print_exc()
+
+ def upload_logs_to_github(self, report_data):
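+        """Pushes the report folder to the GitHub logging repository configured in 'githubdrive'."""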
+ log("Uploading to Github ...")
+ cfg = report_data['config']
+
+ if 'commit_msg' not in report_data:
+ report_data['commit_msg'] = report_data['start_time_stamp']
+
+ report_data['github_link'], report_data['report_folder'] = report.github_push_report(
+ report_data['report_folder'], cfg['githubdrive'], report_data['commit_msg'])
+
+ def upload_logs_to_gdrive(self, report_data):
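+        """Archives the report folder and uploads it to a new Google Drive working directory."""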
+ cfg = report_data['config']
+ report_folder = report_data['report_folder']
+ board_name = cfg['auto_pts']['board']
+ gdrive_config = cfg['gdrive']
+
+        log('Archiving the report folder ...')
+        report.archive_testcases(report_folder, depth=2)
+
+        log('Connecting to GDrive ...')
+        drive = google_drive.Drive(gdrive_config)
+
+        log('Creating GDrive directory ...')
+ report_data['gdrive_url'] = drive.new_workdir(board_name)
+ log(report_data['gdrive_url'])
+
+ log("Uploading to GDrive ...")
+ drive.upload_folder(report_folder)
+
+ def send_email(self, report_data):
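+        """Builds the HTML mail context (summary, links, timing) and sends the report email."""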
+ log("Sending email ...")
+
+ cfg = report_data['config']
+ descriptions = report_data['descriptions']
+
+ mail_ctx = {'repos_info': report_data['repo_status'],
+ 'summary': [mail.status_dict2summary_html(report_data['status_count'])],
+ 'log_url': [],
+ 'board': cfg['auto_pts']['board'],
+ 'platform': report_data['platform'],
+ 'pts_ver': report_data['pts_ver'],
+ 'system_version': report_data['system_version'],
+ 'additional_info': '',
+ }
+
+ mail_ctx.update(cfg['mail'])
+
+ if report_data['regressions']:
+ mail_ctx['summary'].append(mail.regressions2html(report_data['regressions'], descriptions))
+
+ if report_data['progresses']:
+ mail_ctx['summary'].append(mail.progresses2html(report_data['progresses'], descriptions))
+
+ if report_data['new_cases']:
+ mail_ctx['summary'].append(mail.new_cases2html(report_data['new_cases'], descriptions))
+
+ if report_data['deleted_cases']:
+ mail_ctx['summary'].append(mail.deleted_cases2html(report_data['deleted_cases'], descriptions))
+
+        mail_ctx['summary'] = '<br>'.join(mail_ctx['summary'])
+
+ if 'gdrive' in cfg and 'gdrive_url' in report_data:
+ mail_ctx['log_url'].append(mail.url2html(report_data['gdrive_url'], "Results on Google Drive"))
+
+ if 'githubdrive' in cfg and 'github_link' in report_data:
+ mail_ctx['log_url'].append(mail.url2html(report_data['github_link'], 'Results on Github'))
+
+        mail_ctx['log_url'] = '<br>'.join(mail_ctx['log_url'])
+
+ if not mail_ctx['log_url']:
+ mail_ctx['log_url'] = 'Not Available'
+
+ mail_ctx["elapsed_time"] = str(datetime.timedelta(
+ seconds=(int(report_data['end_time'] - report_data['start_time']))))
+
+ if 'additional_info_path' in mail_ctx:
+ try:
+ with open(mail_ctx['additional_info_path']) as file:
+                    mail_ctx['additional_info'] = f'{file.read()} <br>'
+ except Exception as e:
+ logging.exception(e)
+
+ subject, body = self.compose_mail(mail_ctx)
+
+        mail.send_mail(cfg['mail'], subject, body,
+                       report_data.get('mail_attachments',
+                                       [report_data['report_xlsx'], report_data['report_txt']]))
+
+ def compose_mail(self, mail_ctx):
+        """ Create an email body
+ """
+ iso_cal = datetime.date.today().isocalendar()
+ ww_dd_str = "WW%s.%s" % (iso_cal[1], iso_cal[2])
+
+ body = '''
+    This is an automated email, please do not reply.
+ Bluetooth test session - {ww_dd_str}
+ {additional_info}
+ 1. IUT Setup
+ Type: Zephyr
+ Board: {board}
+ Source: {repos_info}
+ 2. PTS Setup
+ OS: {system_version}
+ Platform: {platform}
+ Version: {pts_ver}
+ 3. Test Results
+ Execution Time: {elapsed_time}
+ {summary}
+ Logs
+ {log_url}
+ Sincerely,
+ {name}
+'''
+
+ if 'body' in mail_ctx:
+ body = mail_ctx['body']
+
+ body = body.format(ww_dd_str=ww_dd_str, **mail_ctx)
+
+ subject = mail_ctx.get('subject', 'AutoPTS test session results')
+ subject = f"{subject} - {ww_dd_str}"
+
+ return subject, body
+
def get_filtered_test_cases(iut_config, bot_args, config_default, pts):
_args = {}
@@ -517,7 +855,7 @@ def check_call(cmd, env=None, cwd=None, shell=True):
def get_workspace(workspace):
- for root, dirs, files in os.walk(os.path.join(PROJECT_DIR, 'autopts/workspaces'),
+ for root, dirs, files in os.walk(os.path.join(AUTOPTS_ROOT_DIR, 'autopts/workspaces'),
topdown=True):
for name in dirs:
if name == workspace:
@@ -560,11 +898,11 @@ def get_absolute_module_path(config_path):
if os.path.isfile(_path):
return _path
- _path = os.path.join(PROJECT_DIR, f'autopts/bot/{config_path}')
+ _path = os.path.join(AUTOPTS_ROOT_DIR, f'autopts/bot/{config_path}')
if os.path.isfile(_path):
return _path
- _path = os.path.join(PROJECT_DIR, f'autopts/bot/{config_path}.py')
+ _path = os.path.join(AUTOPTS_ROOT_DIR, f'autopts/bot/{config_path}.py')
if os.path.isfile(_path):
return _path
diff --git a/autopts/bot/common_features/report.py b/autopts/bot/common_features/report.py
index de67213e01..5f5db4fce5 100644
--- a/autopts/bot/common_features/report.py
+++ b/autopts/bot/common_features/report.py
@@ -29,13 +29,9 @@
from autopts.bot.common_features import github
from autopts.bot import common
from autopts.client import PtsServer
+from autopts.config import PTS_XMLS_FOLDER, TMP_DIR, REPORT_XLSX, REPORT_TXT, REPORT_DIFF_TXT, ERROR_TXT, \
+ ERRATA_DIR_PATH, AUTOPTS_ROOT_DIR
-REPORT_XLSX = "report.xlsx"
-REPORT_TXT = "report.txt"
-REPORT_DIFF_TXT = "report-diff.txt"
-ERROR_TXT = 'error.txt'
-
-ERRATA_DIR_PATH = os.path.join(common.PROJECT_DIR, 'errata')
log = logging.debug
@@ -54,7 +50,7 @@ def get_errata(project_name):
def get_autopts_version():
- repo = git.Repo(common.PROJECT_DIR)
+ repo = git.Repo(AUTOPTS_ROOT_DIR)
version = repo.git.show('-s', '--format=%H')
if repo.is_dirty():
@@ -75,8 +71,7 @@ def make_repo_status(repos_info):
# ****************************************************************************
# .xlsx spreadsheet file
# ****************************************************************************
-def make_report_xlsx(results_dict, status_dict, regressions_list,
- progresses_list, descriptions, xmls, project_name=''):
+def make_report_xlsx(report_data):
"""Creates excel file containing test cases results and summary pie chart
:param results_dict: dictionary with test cases results
:param status_dict: status dictionary, where key is status and value is
@@ -87,10 +82,18 @@ def make_report_xlsx(results_dict, status_dict, regressions_list,
:return:
"""
+ results_dict = report_data['tc_results']
+ status_dict = report_data['status_count']
+ regressions_list = report_data['regressions']
+ progresses_list = report_data['progresses']
+ descriptions = report_data['descriptions']
+ xmls = report_data['pts_xml_folder']
+ errata = report_data.get('errata', [])
+
try:
xml_list = list(os.scandir(xmls))
except FileNotFoundError as e:
- print("No XMLs found")
+ log("No XMLs found")
xml_list = None
matched_xml = ''
@@ -105,8 +108,6 @@ def find_xml_by_case(case):
matched_xml = xml.name
break
- errata = get_errata(project_name)
-
header = "AutoPTS Report: " \
"{}".format(datetime.datetime.now().strftime("%Y-%m-%d %H:%M"))
workbook = xlsxwriter.Workbook(REPORT_XLSX)
@@ -185,8 +186,7 @@ def find_xml_by_case(case):
# ****************************************************************************
# .txt result file
# ****************************************************************************
-def make_report_txt(results_dict, regressions_list,
- progresses_list, repo_status, project_name=''):
+def make_report_txt(report_data):
"""Creates txt file containing test cases results
:param results_dict: dictionary with test cases results
:param regressions_list: list of regressions found
@@ -196,11 +196,15 @@ def make_report_txt(results_dict, regressions_list,
:return: txt file path
"""
+ results_dict = report_data['tc_results']
+ regressions_list = report_data['regressions']
+ progresses_list = report_data['progresses']
+ repo_status = report_data['repo_status']
+ errata = report_data.get('errata', [])
+
filename = os.path.join(os.getcwd(), REPORT_TXT)
f = open(filename, "w")
- errata = get_errata(project_name)
-
f.write(f"{repo_status}, autopts={get_autopts_version()}\n")
for tc, result in list(results_dict.items()):
res = result[0]
@@ -217,7 +221,7 @@ def make_report_txt(results_dict, regressions_list,
if tc in errata:
result += ' - ERRATA ' + errata[tc]
- # The first id in the test case is test group
+ # The first id in the test case is a test group
tg = tc.split('/')[0]
f.write("%s%s%s\n" % (tg.ljust(8, ' '), tc.ljust(32, ' '), result))
@@ -226,60 +230,6 @@ def make_report_txt(results_dict, regressions_list,
return filename
-# ****************************************************************************
-# autopts_report result folder
-# ****************************************************************************
-def make_report_folder(iut_logs, pts_logs, xmls, report_xlsx, report_txt,
- report_diff_txt, readme_file, database_file, tag=''):
- """Creates folder containing .txt and .xlsx reports, pulled logs
- from autoptsserver, iut logs and additional README.md.
- """
-
- def get_deepest_dirs(logs_tree, dst_tree, max_depth):
- def recursive(directory, depth=3):
- depth -= 1
-
- for file in os.scandir(directory):
- if file.is_dir():
- if depth > 0:
- recursive(file.path, depth)
- else:
- dst_file = os.path.join(dst_tree, file.name)
- try:
- shutil.move(file.path, dst_file)
- except: # skip waiting for BPV to release the file
- try:
- shutil.copy(file.path, dst_file)
- except:
- pass
-
- recursive(logs_tree, max_depth)
-
- report_dir = 'tmp/autopts_report'
- shutil.rmtree(report_dir, ignore_errors=True)
- Path(report_dir).mkdir(parents=True, exist_ok=True)
-
- shutil.copy(report_diff_txt, os.path.join(report_dir, 'report-diff.txt'))
- shutil.copy(report_txt, os.path.join(report_dir, 'report.txt'))
- shutil.copy(report_txt, os.path.join(report_dir, 'report{}.txt'.format(tag)))
- shutil.copy(report_xlsx, os.path.join(report_dir, 'report{}.xlsx'.format(tag)))
- shutil.copy(readme_file, os.path.join(report_dir, 'README.md'))
- shutil.copy(database_file, os.path.join(report_dir, os.path.basename(database_file)))
-
- iut_logs_new = os.path.join(report_dir, 'iut_logs')
- pts_logs_new = os.path.join(report_dir, 'pts_logs')
- xmls_new = os.path.join(report_dir, 'XMLs/')
-
- get_deepest_dirs(iut_logs, iut_logs_new, 3)
- get_deepest_dirs(pts_logs, pts_logs_new, 3)
- try:
- shutil.move(xmls, xmls_new)
- except FileNotFoundError:
- print('XMLs directory doesn\'t exist')
-
- return os.path.join(os.getcwd(), report_dir)
-
-
def report_parse_test_cases(report):
if not os.path.exists(report):
return None
@@ -300,12 +250,12 @@ def report_parse_test_cases(report):
return test_cases[1:]
-def make_report_diff(log_git_conf, results, regressions,
- progresses, new_cases):
- old_report_txt = os.path.join(log_git_conf['path'],
- log_git_conf['subdir'],
- 'autopts_report',
- REPORT_TXT)
+def make_report_diff(report_data):
+ old_report_txt = report_data['old_report_txt']
+ results = report_data['tc_results']
+ regressions = report_data['regressions']
+ progresses = report_data['progresses']
+ new_cases = report_data['new_cases']
filename = os.path.join(os.getcwd(), REPORT_DIFF_TXT)
f = open(filename, "w")
@@ -432,8 +382,8 @@ def pull_server_logs(args):
else:
workspace_dir = workspace_name
- logs_folder = 'tmp/' + workspace_name
- xml_folder = 'tmp/XMLs'
+ logs_folder = os.path.join(TMP_DIR, workspace_name)
+ xml_folder = PTS_XMLS_FOLDER
shutil.rmtree(logs_folder, ignore_errors=True)
shutil.rmtree(xml_folder, ignore_errors=True)
Path(xml_folder).mkdir(parents=True, exist_ok=True)
diff --git a/autopts/bot/mynewt.py b/autopts/bot/mynewt.py
index f24a68d9b4..f3aa197d23 100755
--- a/autopts/bot/mynewt.py
+++ b/autopts/bot/mynewt.py
@@ -13,8 +13,6 @@
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
-import collections
-import datetime
import importlib
import logging
import os
@@ -26,12 +24,11 @@
from autopts import bot
from autopts.bot.common import BuildAndFlashException
-from autopts.bot.common_features.github import update_sources
from autopts.client import Client
-from autopts.ptsprojects.boards import release_device, get_build_and_flash, get_board_type
+from autopts.config import BOT_STATE_JSON
+from autopts.ptsprojects.boards import get_build_and_flash, get_board_type
from autopts.ptsprojects.mynewt.iutctl import get_iut, log
-from autopts.ptsprojects.testcase_db import DATABASE_FILE
-from autopts.bot.common_features import github, report, mail, google_drive
+from autopts.bot.common_features import report
PROJECT_NAME = Path(__file__).stem
@@ -92,48 +89,6 @@ def get_build_info_file(project_path):
return file_name
-def compose_mail(args, mail_cfg, mail_ctx):
- """ Create a email body
- """
-
- additional_info = ''
- if 'additional_info_path' in mail_cfg:
- try:
- with open(mail_cfg['additional_info_path']) as file:
-                additional_info = f'{file.read()} <br>'
- except Exception as e:
- logging.exception(e)
-
- body = f'''
- This is automated email and do not reply.
- Bluetooth test session
- {additional_info}
- 1. IUT Setup
- Board: {args["board"]}
- Source: {mail_ctx["mynewt_repo_status"]}
- 2. PTS Setup
- OS: Windows 10
- Platform: {args['platform']}
- Version: {args['pts_ver']}
- 3. Test Results
- Execution Time: {mail_ctx["elapsed_time"]}
- {mail_ctx["summary"]}
- {mail_ctx["regression"]}
- {mail_ctx["progresses"]}
- Logs
- {mail_ctx["log_url"]}
- Sincerely,
- {mail_cfg['name']}
-'''
-
- if 'subject' in mail_cfg:
- subject = mail_cfg['subject']
- else:
- subject = "[Mynewt Nimble] AutoPTS test session results"
-
- return subject, body
-
-
class MynewtBotConfigArgs(bot.common.BotConfigArgs):
def __init__(self, args):
super().__init__(args)
@@ -179,124 +134,37 @@ def apply_config(self, args, config, value):
time.sleep(10)
def start(self, args=None):
- main(self)
-
-
-class MynewtClient(Client):
- def __init__(self):
- super().__init__(get_iut, sys.modules['autopts.ptsprojects.zephyr'], 'mynewt')
-
-
-BotClient = MynewtBotClient
-
-
-def main(bot_client):
- print("Mynewt bot start!")
-
- if sys.platform == 'win32':
- if 'MSYS2_BASH_PATH' not in os.environ:
- print('Set environmental variable MSYS2_BASH_PATH.')
- return 0
- # In case wsl was configured and its bash has higher prio than msys2 bash
- os.environ['PATH'] = '/usr/bin:' + os.environ['PATH']
-
- bot.common.pre_cleanup()
-
- start_time = time.time()
-
- cfg = bot_client.bot_config
- args = cfg['auto_pts']
+ print("Mynewt bot start!")
- if 'database_file' not in args:
- args['database_file'] = DATABASE_FILE
+ if sys.platform == 'win32':
+ if 'MSYS2_BASH_PATH' not in os.environ:
+ print('Set environmental variable MSYS2_BASH_PATH.')
+ return 0
+ # In case wsl was configured and its bash has higher prio than msys2 bash
+ os.environ['PATH'] = '/usr/bin:' + os.environ['PATH']
- if 'githubdrive' in cfg:
- update_sources(cfg['githubdrive']['path'],
- cfg['githubdrive']['remote'],
- cfg['githubdrive']['branch'], True)
+ if not os.path.exists(BOT_STATE_JSON):
+            # args is not passed in on a fresh start; read the options from the bot config
+            bot_args = self.bot_config['auto_pts']
+            if bot_args.get('newt_upgrade', False):
+                bot.common.check_call(['newt', 'upgrade', '-f', '--shallow=0'],
+                                      cwd=bot_args['project_path'])
- if args.get('newt_upgrade', False):
- bot.common.check_call(['newt', 'upgrade', '-f', '--shallow=0'], cwd=args['project_path'])
+ super().start(args)
- if 'git' in cfg:
- repos_info = github.update_repos(args['project_path'], cfg["git"])
- repo_status = report.make_repo_status(repos_info)
- else:
- repo_status = ''
+ def generate_attachments(self, report_data, attachments):
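+        """Adds the Mynewt build info file to the report attachments."""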
+ cfg = report_data['config']
+ project_path = os.path.abspath(cfg['auto_pts']['project_path'])
+ build_info_file = get_build_info_file(project_path)
+ attachments.append(build_info_file)
- try:
- stats = bot_client.run_tests()
- finally:
- release_device(bot_client.args.tty_file)
+ def compose_mail(self, mail_ctx):
+ if 'subject' not in mail_ctx:
+ mail_ctx['subject'] = "[Mynewt Nimble] AutoPTS test session results"
- summary = stats.get_status_count()
- results = stats.get_results()
- descriptions = stats.get_descriptions()
- regressions = stats.get_regressions()
- progresses = stats.get_progresses()
- args['pts_ver'] = stats.pts_ver
- args['platform'] = stats.platform
+        return super().compose_mail(mail_ctx)
- results = collections.OrderedDict(sorted(results.items()))
- pts_logs, xmls = report.pull_server_logs(bot_client.args)
-
- report_file = report.make_report_xlsx(results, summary, regressions,
- progresses, descriptions, xmls, PROJECT_NAME)
- report_txt = report.make_report_txt(results, regressions,
- progresses, repo_status, PROJECT_NAME)
- logs_folder = report.archive_testcases("logs")
-
- build_info_file = get_build_info_file(os.path.abspath(args['project_path']))
-
- end_time = time.time()
- url = None
-
- if 'gdrive' in cfg:
- drive = google_drive.Drive(cfg['gdrive'])
- url = drive.new_workdir(args['board'])
- drive.upload(report_file)
- drive.upload(report_txt)
- drive.upload_folder(logs_folder)
- drive.upload(build_info_file)
- drive.upload(args['database_file'])
- drive.upload_folder(pts_logs)
-
- if 'mail' in cfg:
- print("Sending email ...")
-
- # keep mail related context to simplify the code
- mail_ctx = {"summary": mail.status_dict2summary_html(summary),
- "regression": mail.regressions2html(regressions,
- descriptions),
- "progresses": mail.progresses2html(progresses,
- descriptions),
- "mynewt_repo_status": repo_status}
-
- # Summary
-
- # Regression and test case description
-
- # Log in Google drive in HTML format
- if 'gdrive' in cfg and url:
- mail_ctx["log_url"] = mail.url2html(url,
- "Results on Google Drive")
- else:
- mail_ctx["log_url"] = "Not Available"
-
- # Elapsed Time
- mail_ctx["elapsed_time"] = str(datetime.timedelta(
- seconds=(int(end_time - start_time))))
-
- subject, body = compose_mail(args, cfg['mail'], mail_ctx)
-
- mail.send_mail(cfg['mail'], subject, body,
- [report_file, report_txt])
-
- print("Done")
+class MynewtClient(Client):
+ def __init__(self):
+ super().__init__(get_iut, sys.modules['autopts.ptsprojects.zephyr'], 'mynewt')
- bot.common.cleanup()
- print("\nBye!")
- sys.stdout.flush()
- return 0
+BotClient = MynewtBotClient
diff --git a/autopts/bot/zephyr.py b/autopts/bot/zephyr.py
index 574ec2fcf7..e775dbf76a 100755
--- a/autopts/bot/zephyr.py
+++ b/autopts/bot/zephyr.py
@@ -16,30 +16,22 @@
# more details.
#
-import collections
-import datetime
import importlib
-import json
import logging
import os
import sys
import time
import traceback
-from pathlib import Path
-
import serial
+from pathlib import Path
from autopts import bot
-from autopts.bot.common_features.github import update_sources
-from autopts.config import BOT_STATE_JSON, TMP_DIR
from autopts.ptsprojects.zephyr import ZEPHYR_PROJECT_URL
from autopts import client as autoptsclient
-
from autopts.bot.common import BotConfigArgs, BotClient, BuildAndFlashException
-from autopts.ptsprojects.boards import tty_to_com, release_device, get_build_and_flash, get_board_type
-from autopts.ptsprojects.testcase_db import DATABASE_FILE
+from autopts.ptsprojects.boards import tty_to_com, get_build_and_flash, get_board_type
from autopts.ptsprojects.zephyr.iutctl import get_iut, log
-from autopts.bot.common_features import github, report, mail, google_drive
+from autopts.bot.common_features import report
PROJECT_NAME = Path(__file__).stem
@@ -91,79 +83,6 @@ def zephyr_hash_url(commit):
commit)
-def make_readme_md(start_time, end_time, repos_info, pts_ver):
- """Creates README.md for Github logging repo
- """
- readme_file = 'tmp/README.md'
-
- Path(os.path.dirname(readme_file)).mkdir(parents=True, exist_ok=True)
-
- with open(readme_file, 'w') as f:
- readme_body = '''# AutoPTS report
-
- Start time: {}
-
- End time: {}
-
- PTS version: {}
-
- Repositories:
-
- '''.format(start_time, end_time, pts_ver)
- f.write(readme_body)
-
- f.writelines(
- ['\t{}: {} [{}]\n'.format(name, info['commit'], info['desc']) for name, info in repos_info.items()])
-
- return readme_file
-
-
-def compose_mail(args, mail_cfg, mail_ctx):
- """ Create a email body
- """
-
- additional_info = ''
- if 'additional_info_path' in mail_cfg:
- try:
- with open(mail_cfg['additional_info_path']) as file:
-                additional_info = f'{file.read()} <br>'
- except Exception as e:
- logging.exception(e)
-
- iso_cal = datetime.date.today().isocalendar()
- ww_dd_str = "WW%s.%s" % (iso_cal[1], iso_cal[2])
-
- body = f'''
- This is automated email and do not reply.
- Bluetooth test session - {ww_dd_str}
- {additional_info}
- 1. IUT Setup
- Type: Zephyr
- Board: {args['board']}
- Source: {mail_ctx['repos_info']}
- 2. PTS Setup
- OS: Windows 10
- Platform: {args['platform']}
- Version: {args['pts_ver']}
- 3. Test Results
- Execution Time: {mail_ctx['elapsed_time']}
- {mail_ctx['summary']}
- Logs
- {mail_ctx['log_url']}
- Sincerely,
- {mail_cfg['name']}
-'''
-
- if 'subject' in mail_cfg:
- subject = mail_cfg['subject']
- else:
- subject = "AutoPTS test session results"
-
- subject = "%s - %s" % (subject, ww_dd_str)
-
- return subject, body
-
-
class ZephyrBotConfigArgs(BotConfigArgs):
def __init__(self, args):
super().__init__(args)
@@ -226,112 +145,11 @@ def apply_config(self, args, config, value):
time.sleep(10)
def start(self, args=None):
- main(self)
-
-
-class ZephyrClient(autoptsclient.Client):
- def __init__(self):
- super().__init__(get_iut, sys.modules['autopts.ptsprojects.zephyr'], 'zephyr')
-
-
-BotClient = ZephyrBotClient
-
-
-def main(bot_client):
- if os.path.exists(BOT_STATE_JSON):
- print('Continuing the previous terminated test run (remove tmp/ to start freshly)')
-
- with open(BOT_STATE_JSON, "r") as f:
- data = f.read()
- bot_state = json.loads(data)
- cfg = bot_state['config']
- args = cfg['auto_pts']
-
- else:
- # Start fresh test run
-
- bot_state = {}
- bot.common.pre_cleanup()
-
- bot_state['start_time'] = time.time()
-
- cfg = bot_client.bot_config
- args = cfg['auto_pts']
- bot_state['config'] = cfg
-
- if 'database_file' not in args:
- args['database_file'] = DATABASE_FILE
-
- if 'githubdrive' in cfg:
- update_sources(cfg['githubdrive']['path'],
- cfg['githubdrive']['remote'],
- cfg['githubdrive']['branch'], True)
+ super().start(args)
- args['kernel_image'] = os.path.join(args['project_path'], 'tests',
- 'bluetooth', 'tester', 'outdir',
- 'zephyr', 'zephyr.elf')
-
- if 'git' in cfg:
- bot_state['repos_info'] = github.update_repos(args['project_path'], cfg["git"])
- bot_state['repo_status'] = report.make_repo_status(bot_state['repos_info'])
- args['repos'] = cfg['git']
- else:
- bot_state['repos_info'] = {}
- bot_state['repo_status'] = ''
-
- if args.get('use_backup', False):
- os.makedirs(os.path.dirname(TMP_DIR), exist_ok=True)
-
- with open(BOT_STATE_JSON, "w") as f:
- f.write(json.dumps(bot_state, indent=4))
-
- try:
- stats = bot_client.run_tests()
- finally:
- release_device(bot_client.args.tty_file)
-
- summary = stats.get_status_count()
- results = stats.get_results()
- descriptions = stats.get_descriptions()
- regressions = stats.get_regressions()
- progresses = stats.get_progresses()
- new_cases = stats.get_new_cases()
- deleted_cases = []
- args['pts_ver'] = stats.pts_ver
- args['platform'] = stats.platform
-
- results = collections.OrderedDict(sorted(results.items()))
-
- pts_logs, xmls = report.pull_server_logs(bot_client.args)
-
- report_file = report.make_report_xlsx(results, summary, regressions,
- progresses, descriptions, xmls, PROJECT_NAME)
- report_txt = report.make_report_txt(results, regressions,
- progresses, bot_state['repo_status'], PROJECT_NAME)
-
- end_time = time.time()
- end_time_stamp = datetime.datetime.fromtimestamp(end_time).strftime("%Y_%m_%d_%H_%M_%S")
- start_time_stamp = datetime.datetime.fromtimestamp(bot_state['start_time']).strftime("%Y_%m_%d_%H_%M_%S")
- url = None
- github_link = None
- report_folder = None
-
- if 'githubdrive' in cfg or 'gdrive' in cfg:
- iut_logs = 'logs/'
- readme_file = make_readme_md(start_time_stamp, end_time_stamp,
- bot_state['repos_info'], args['pts_ver'])
-
- report_diff_txt, deleted_cases = report.make_report_diff(cfg['githubdrive'], results,
- regressions, progresses, new_cases)
-
- report_folder = report.make_report_folder(iut_logs, pts_logs, xmls, report_file,
- report_txt, report_diff_txt, readme_file,
- args['database_file'],
- '_iut_zephyr_' + start_time_stamp)
-
- if 'githubdrive' in cfg:
- print("Uploading to Github ...")
- commit_msg_pattern = '{branch}_{timestamp}_{commit_sha}'
+ def upload_logs_to_github(self, report_data):
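+        """Builds the commit message (branch, start timestamp, zephyr commit) and pushes via the base class."""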
+ cfg = report_data['config']
+ commit_msg_pattern = '{branch}_{start_time_stamp}_{commit_sha}'
branch = 'no_branch'
commit_sha = 'no_sha'
@@ -340,64 +158,24 @@ def main(bot_client):
commit_msg_pattern = cfg['githubdrive']['commit_msg']
if 'git' in cfg:
- commit_sha = bot_state['repos_info']['zephyr']['commit']
+ commit_sha = report_data['repos_info']['zephyr']['commit']
branch = cfg['git']['zephyr']['branch']
- commit_msg = commit_msg_pattern.format(
- timestamp=start_time_stamp, branch=branch, commit_sha=commit_sha)
- github_link, report_folder = report.github_push_report(
- report_folder, cfg['githubdrive'], commit_msg)
-
- if 'gdrive' in cfg:
- print("Uploading to GDrive ...")
- report.archive_testcases(report_folder, depth=2)
- drive = google_drive.Drive(cfg['gdrive'])
- url = drive.new_workdir(args['board'])
- drive.upload_folder(report_folder)
-
- if 'mail' in cfg:
- print("Sending email ...")
-
- # keep mail related context to simplify the code
- mail_ctx = {'repos_info': bot_state['repo_status'],
- 'summary': f'''{mail.status_dict2summary_html(summary)}
-{mail.regressions2html(regressions, descriptions)}
-{mail.progresses2html(progresses, descriptions)}
-{mail.new_cases2html(new_cases, descriptions)}
-{mail.deleted_cases2html(deleted_cases, descriptions)}''',
- }
-
- # Summary
-
- # Regression and test case description
+ report_data['commit_msg'] = commit_msg_pattern.format(
+ branch=branch, commit_sha=commit_sha, **report_data)
- # Log in Google drive in HTML format
- if 'gdrive' in cfg and url:
- mail_ctx["log_url"] = mail.url2html(url, "Results on Google Drive")
+ super().upload_logs_to_github(report_data)
- if 'githubdrive' in cfg and github_link:
- if 'log_url' in mail_ctx:
-                mail_ctx["log_url"] += '<br>'
- else:
- mail_ctx["log_url"] = ''
- mail_ctx['log_url'] += mail.url2html(github_link, 'Results on Github')
+ def compose_mail(self, mail_ctx):
+ if 'subject' not in mail_ctx:
+ mail_ctx['subject'] = "[Zephyr] AutoPTS test session results"
- if 'log_url' not in mail_ctx:
- mail_ctx['log_url'] = 'Not Available'
+        return super().compose_mail(mail_ctx)
- # Elapsed Time
- mail_ctx["elapsed_time"] = str(datetime.timedelta(
- seconds=(int(end_time - bot_state['start_time']))))
- subject, body = compose_mail(args, cfg['mail'], mail_ctx)
-
- mail.send_mail(cfg['mail'], subject, body,
- [report_file, report_txt])
-
- print("Done")
+class ZephyrClient(autoptsclient.Client):
+ def __init__(self):
+ super().__init__(get_iut, sys.modules['autopts.ptsprojects.zephyr'], 'zephyr')
- bot.common.cleanup()
- print("\nBye!")
- sys.stdout.flush()
- return 0
+BotClient = ZephyrBotClient
diff --git a/autopts/client.py b/autopts/client.py
index ca6a801a7a..f20e727f7b 100755
--- a/autopts/client.py
+++ b/autopts/client.py
@@ -38,7 +38,7 @@
from xmlrpc.server import SimpleXMLRPCServer
from termcolor import colored
-from autopts.config import TC_STATS_RESULTS_XML, TEST_CASE_DB, TMP_DIR
+from autopts.config import TC_STATS_RESULTS_XML, TEST_CASE_DB, TMP_DIR, IUT_LOGS_FOLDER
from autopts.ptsprojects import ptstypes
from autopts.ptsprojects import stack
from autopts.ptsprojects.boards import get_available_boards, tty_to_com
@@ -557,6 +557,7 @@ def __init__(self, projects, test_cases, retry_count, db=None,
xml_results_file=None):
self.pts_ver = ''
self.platform = ''
+ self.system_version = ''
self.run_count_max = retry_count + 1 # Run test at least once
self.run_count = 0 # Run count of current test case
self.num_test_cases = len(test_cases)
@@ -1211,7 +1212,7 @@ def run_test_cases(ptses, test_case_instances, args, stats, **kwargs):
ports_str = '_'.join(str(x) for x in args.cli_port)
now = datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
- session_log_dir = 'logs/cli_port_' + ports_str + '/' + now
+ session_log_dir = f'{IUT_LOGS_FOLDER}/cli_port_{ports_str}/{now}'
try:
os.makedirs(session_log_dir)
except OSError as e:
@@ -1300,6 +1301,7 @@ def __init__(self, get_iut, project, name, parser_class=CliParser):
"""
self.test_cases = None
self.get_iut = get_iut
+ self.autopts_project_name = name
self.store_tag = name + '_'
setup_project_name(project)
self.boards = get_available_boards(name)
@@ -1357,7 +1359,7 @@ def main(self, _args=None):
elif self.args.sudo:
sys.exit("Please run this program as root.")
- os.makedirs(os.path.dirname(TMP_DIR), exist_ok=True)
+ os.makedirs(TMP_DIR, exist_ok=True)
if self.args.store:
tc_db_table_name = self.store_tag + str(self.args.board_name)
diff --git a/autopts/config.py b/autopts/config.py
index 7235786e32..335867c6d4 100644
--- a/autopts/config.py
+++ b/autopts/config.py
@@ -15,17 +15,32 @@
"""Configuration variables"""
+import os.path
+
SERVER_PORT = 65000
CLIENT_PORT = 65001
BTMON_PORT = 65432
MAX_SERVER_RESTART_TIME = 60
-TMP_DIR = 'tmp/'
-ALL_STATS_RESULTS_XML = TMP_DIR + 'all_stats_results.xml'
-TC_STATS_RESULTS_XML = TMP_DIR + 'tc_stats_results.xml'
-TEST_CASES_JSON = TMP_DIR + 'test_cases_file.json'
-ALL_STATS_JSON = TMP_DIR + 'all_stats.json'
-TC_STATS_JSON = TMP_DIR + 'tc_stats.json'
-TEST_CASE_DB = TMP_DIR + 'TestCase.db'
-BOT_STATE_JSON = TMP_DIR + 'bot_state.json'
+AUTOPTS_ROOT_DIR = os.path.dirname( # auto-pts repo directory
+ os.path.dirname( # autopts module directory
+ os.path.abspath(__file__))) # this file directory
+
+TMP_DIR = 'tmp'
+ALL_STATS_RESULTS_XML = os.path.join(TMP_DIR, 'all_stats_results.xml')
+TC_STATS_RESULTS_XML = os.path.join(TMP_DIR, 'tc_stats_results.xml')
+TEST_CASES_JSON = os.path.join(TMP_DIR, 'test_cases_file.json')
+ALL_STATS_JSON = os.path.join(TMP_DIR, 'all_stats.json')
+TC_STATS_JSON = os.path.join(TMP_DIR, 'tc_stats.json')
+TEST_CASE_DB = os.path.join(TMP_DIR, 'TestCase.db')
+BOT_STATE_JSON = os.path.join(TMP_DIR, 'bot_state.json')
+REPORT_README_MD = os.path.join(TMP_DIR, 'README.md')
+AUTOPTS_REPORT_FOLDER = os.path.join(TMP_DIR, 'autopts_report')
+IUT_LOGS_FOLDER = 'logs'
+PTS_XMLS_FOLDER = os.path.join(TMP_DIR, 'XMLs')
+ERRATA_DIR_PATH = os.path.join(AUTOPTS_ROOT_DIR, 'errata')
+REPORT_XLSX = "report.xlsx"
+REPORT_TXT = "report.txt"
+REPORT_DIFF_TXT = "report-diff.txt"
+ERROR_TXT = 'error.txt'
diff --git a/autoptsserver.py b/autoptsserver.py
index c59a53e2eb..735a1b4920 100755
--- a/autoptsserver.py
+++ b/autoptsserver.py
@@ -32,6 +32,7 @@
import copy
import logging as root_logging
import os
+import platform
import shutil
import subprocess
import sys
@@ -502,6 +503,7 @@ def server_init(self):
self.server.register_function(self.copy_file, 'copy_file')
self.server.register_function(self.delete_file, 'delete_file')
self.server.register_function(self.get_system_model, 'get_system_model')
+ self.server.register_function(self.get_system_version, 'get_system_version')
self.server.register_function(self.shutdown_pts_bpv, 'shutdown_pts_bpv')
self.server.register_function(self.get_path, 'get_path')
self.server.register_instance(self.pts)
@@ -549,6 +551,11 @@ def get_system_model(self):
return 'Real HW'
return 'PotatOS'
+ def get_system_version(self):
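+        # Reports the OS of the PTS host, e.g. 'Windows 10'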
+ os_name = platform.system()
+ version = platform.release()
+ return f'{os_name} {version}'
+
def get_path(self):
self._update_request_time()
return os.path.dirname(os.path.abspath(__file__))
diff --git a/test/unittests.py b/test/unittests.py
index aa3de0a41c..44eb50aa7c 100644
--- a/test/unittests.py
+++ b/test/unittests.py
@@ -8,7 +8,7 @@
from autopts.bot.zephyr import make_readme_md
from autopts.client import FakeProxy, TestCaseRunStats
-from autopts.config import TMP_DIR, ALL_STATS_RESULTS_XML
+from autopts.config import TMP_DIR, ALL_STATS_RESULTS_XML, IUT_LOGS_FOLDER
from autopts.ptsprojects.testcase_db import TestCaseTable
from autoptsclient_bot import import_bot_projects, import_bot_module
from test.mocks.mocked_test_cases import mock_workspace_test_cases, test_case_list_generation_samples
@@ -226,7 +226,7 @@ def generate_stats(self, files):
repos_info = {'zephyr': {'commit': '123456', 'desc': 'zephyr'}}
pts_ver = '8_5_0'
- iut_logs = 'logs/'
+ iut_logs = IUT_LOGS_FOLDER
pts_logs = 'tmp/zephyr-master'
xmls = 'tmp/XMLs'
Path(iut_logs).mkdir(parents=True, exist_ok=True)
diff --git a/tools/cron/common.py b/tools/cron/common.py
index 6a4d9712be..aad6839c52 100644
--- a/tools/cron/common.py
+++ b/tools/cron/common.py
@@ -51,7 +51,7 @@
from autopts.bot.common import load_module_from_path
from autopts.bot.common_features.github import update_repos
from autopts.bot.common_features.mail import send_mail
-from autopts.config import TC_STATS_JSON
+from autopts.config import TC_STATS_JSON, TMP_DIR, IUT_LOGS_FOLDER, REPORT_XLSX, REPORT_TXT
from tools.cron.compatibility import find_latest, find_by_project_hash, find_by_autopts_hash, find_by_pts_ver, \
get_hash_from_reference
from tools.cron.remote_terminal import RemoteTerminalClientProxy
@@ -307,10 +307,10 @@ def ssh_copy_file(hostname, username, password,
def pre_cleanup_files(autopts_repo, project_repo):
files_to_save = [
- os.path.join(autopts_repo, 'tmp/'),
- os.path.join(autopts_repo, 'logs/'),
- os.path.join(autopts_repo, 'report.txt'),
- os.path.join(autopts_repo, 'report.xlsx'),
+ os.path.join(autopts_repo, TMP_DIR),
+ os.path.join(autopts_repo, IUT_LOGS_FOLDER),
+ os.path.join(autopts_repo, REPORT_TXT),
+ os.path.join(autopts_repo, REPORT_XLSX),
os.path.join(autopts_repo, 'TestCase.db'),
os.path.join(autopts_repo, 'stdout_autoptsbot.log'),
os.path.join(autopts_repo, 'stdout_autoptsserver.log'),