from . import OBSLocal
import os

# Needed to mock LegalAuto
from osc.core import change_review_state
from unittest.mock import MagicMock

# Import the involved staging commands
from osclib.freeze_command import FreezeCommand
from osclib.select_command import SelectCommand
from osclib.accept_command import AcceptCommand

# Import the involved bots
from check_source import CheckSource
legal_auto = __import__("legal-auto")  # Needed because of the dash in the filename
LegalAuto = legal_auto.LegalAuto

PROJECT = 'openSUSE:Factory'
DEVEL_PROJECT = 'devel:drinking'
STAGING_PROJECT_NAME = 'openSUSE:Factory:Staging:A'
HUMAN_REVIEWER = 'factory-cop'

FIXTURES = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'fixtures')


class TestFactorySubmitRequest(OBSLocal.TestCase):
    """Tests for the whole lifecycle of submit requests in Factory

    This test is intended to showcase the typical workflow of a new submit request for a ring
    package in Factory. Good for newcomers and to serve as a reference for creating tests for
    similar scenarios.

    The goal is not to test all possible combinations of things that could go wrong with every
    review bot. Please use separate per-bot tests (like check_source_test.py) for that.

    This is also useful as a smoke test, to check that all the pieces keep working together.
    """

    def setUp(self):
        super(TestFactorySubmitRequest, self).setUp()

        # Set up the basic scenario, with manual reviewers, staging projects, rings and wine as
        # the example package (wine is in ring1, see OBSLocal.FactoryWorkflow.setup_rings)
        self.wf = OBSLocal.FactoryWorkflow(PROJECT)
        self.__setup_review_team()
        self.__setup_devel_package('wine')
        self.wf.setup_rings(devel_project=DEVEL_PROJECT)

        # Relax the requirements for sending a submit request to Factory,
        # so the CheckSource bot is easier to please
        self.wf.remote_config_set({'required-source-maintainer': ''})

        # Set up the different bots typically used for Factory
        self.setup_review_bot(self.wf, PROJECT, 'factory-auto', CheckSource)
        self.setup_review_bot(self.wf, PROJECT, 'licensedigger', LegalAuto)

        # Sorry, but LegalAuto is simply too hard to test while keeping this test readable,
        # see the description of __mock_licensedigger for more rationale
        self.__mock_licensedigger()

        # The staging project must be frozen in order to move packages into it
        FreezeCommand(self.wf.api).perform(STAGING_PROJECT_NAME)

        # Create the submit request
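        # add_commit=False: the fixture files for wine were already committed by
        # __setup_devel_package above, so presumably no extra dummy commit is needed here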
        self.request = self.wf.create_submit_request(DEVEL_PROJECT, 'wine', add_commit=False)

    def tearDown(self):
        super().tearDown()
        del self.wf

    def test_happy_path(self):
        """Tests the ideal case in which all bots are happy and the request successfully goes
        through staging"""

        # Initial state: reviews have been created for the bots and for the staging workflow
        reqid = self.request.reqid
        self.assertReview(reqid, by_user=('factory-auto', 'new'))
        self.assertReview(reqid, by_user=('licensedigger', 'new'))
        self.assertReview(reqid, by_group=('factory-staging', 'new'))

        # Let the bots come into play
        self.execute_review_bot([reqid], 'factory-auto')
        self.execute_review_bot([reqid], 'licensedigger')

        # The bots are happy, now it's time for the manual review (requested by the bots) and
        # for the staging work
        self.assertReview(reqid, by_user=('factory-auto', 'accepted'))
        self.assertReview(reqid, by_user=('licensedigger', 'accepted'))
        # This review will be accepted when the Staging Manager puts the request into a staging project
        self.assertReview(reqid, by_group=('factory-staging', 'new'))
        # Review created by the CheckSource bot. It must be accepted manually.
        self.assertReview(reqid, by_group=('opensuse-review-team', 'new'))

        # Let's first accept the manual review
        change_review_state(
            apiurl=self.wf.apiurl, reqid=reqid,
            newstate='accepted', by_group='opensuse-review-team'
        )
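        # (the call above stands in for what a member of 'opensuse-review-team', such as
        # HUMAN_REVIEWER, would normally do through the OBS web UI or osc; the test simply
        # flips the review state directly)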

        # Now only the staging workflow is pending
        self.assertReview(reqid, by_user=('factory-auto', 'accepted'))
        self.assertReview(reqid, by_user=('licensedigger', 'accepted'))
        self.assertReview(reqid, by_group=('opensuse-review-team', 'accepted'))
        self.assertReview(reqid, by_group=('factory-staging', 'new'))

        # Before using the staging plugin, we need to force a reload of the configuration,
        # because execute_review_bot temporarily switches the user and that causes problems
        self.wf.load_config()

        # The Staging Manager puts the request into a staging project
        SelectCommand(self.wf.api, STAGING_PROJECT_NAME).perform(['wine'])

        # The factory-staging review is now accepted and a new review associated with the
        # staging project has been created
        self.assertReview(reqid, by_group=('factory-staging', 'accepted'))
        self.assertReview(reqid, by_project=(STAGING_PROJECT_NAME, 'new'))

        # Let's say everything looks good in the staging project and the Staging Manager accepts it
        AcceptCommand(self.wf.api).accept_all([STAGING_PROJECT_NAME], True)

        # Finally, all the reviews are accepted: one for each bot, one for the manual review and
        # two for the staging project (one as a consequence of selecting the package into a
        # staging project and the other as a consequence of accepting the staging)
        self.assertReview(reqid, by_user=('factory-auto', 'accepted'))
        self.assertReview(reqid, by_user=('licensedigger', 'accepted'))
        self.assertReview(reqid, by_group=('opensuse-review-team', 'accepted'))
        self.assertReview(reqid, by_group=('factory-staging', 'accepted'))
        self.assertReview(reqid, by_project=(STAGING_PROJECT_NAME, 'accepted'))

        # So it's time to accept the request
        self.request.change_state('accepted')
        self.assertRequestState(reqid, name='accepted')

    def __setup_devel_package(self, pkg_name):
        """Creates the package in the devel project and commits its fixture files"""
        pkg = self.wf.create_package(DEVEL_PROJECT, pkg_name)
        pkg.commit_files(os.path.join(FIXTURES, 'packages', pkg_name))

    def __setup_review_team(self):
        """Creates the review team with a user in it

        According to the default configuration for Factory, the CheckSource bot must create a
        review for the group 'opensuse-review-team' for each request that passes the automatic
        checks. That behavior can be configured with the following two parameters: 'review-team'
        and 'check-source-add-review-team'. This function ensures the test can work with the
        default configuration, to serve as a realistic example.
        """
        self.wf.create_user(HUMAN_REVIEWER)
        self.wf.create_group('opensuse-review-team', users=[HUMAN_REVIEWER])

    def __mock_licensedigger(self):
        """Mocks the execution of the LegalAuto bot, so it always succeeds and accepts the review

        Setting up a bot and then just mocking its whole execution may look pointless, but this
        test case was conceived as a showcase of the Factory workflow, so all the relevant bots
        should be represented. Unfortunately, LegalAuto is not written to be testable and it's
        very dependent on external components. Hopefully this whole mock can be removed in the
        future.
        """
        bot = self.review_bots['licensedigger']
        bot.check_requests = MagicMock(side_effect=self.__accept_license)

    def __accept_license(self):
        """See :func:`__mock_licensedigger`"""
        change_review_state(
            apiurl=self.wf.apiurl, reqid=self.request.reqid,
            newstate='accepted', by_user='licensedigger'
        )