From 0f6a4c7b368bb5458e7f26f34bb0aeedcac9808c Mon Sep 17 00:00:00 2001 From: Christophe Haen Date: Fri, 1 Sep 2023 17:17:17 +0200 Subject: [PATCH 1/8] feat (diracx): add initial future client for DiracX job monitoring --- .../environment_variable_configuration.rst | 3 ++ .../Client/JobMonitoringClient.py | 8 +++++ .../FutureClient/JobMonitoringClient.py | 36 +++++++++++++++++++ .../FutureClient/__init__.py | 0 4 files changed, 47 insertions(+) create mode 100644 src/DIRAC/WorkloadManagementSystem/FutureClient/JobMonitoringClient.py create mode 100644 src/DIRAC/WorkloadManagementSystem/FutureClient/__init__.py diff --git a/docs/source/AdministratorGuide/ServerInstallations/environment_variable_configuration.rst b/docs/source/AdministratorGuide/ServerInstallations/environment_variable_configuration.rst index 09d0d29b3fa..ab494abfb0d 100644 --- a/docs/source/AdministratorGuide/ServerInstallations/environment_variable_configuration.rst +++ b/docs/source/AdministratorGuide/ServerInstallations/environment_variable_configuration.rst @@ -18,6 +18,9 @@ DIRAC_DEPRECATED_FAIL If set, the use of functions or objects that are marked ``@deprecated`` will fail. Useful for example in continuous integration tests against future versions of DIRAC +DIRAC_ENABLE_DIRACX_JOB_MONITORING + If set, calls the diracx job monitoring service. Off by default. + DIRAC_FEWER_CFG_LOCKS If ``true`` or ``yes`` or ``on`` or ``1`` or ``y`` or ``t``, DIRAC will reduce the number of locks used when accessing the CS for better performance (default, ``no``). diff --git a/src/DIRAC/WorkloadManagementSystem/Client/JobMonitoringClient.py b/src/DIRAC/WorkloadManagementSystem/Client/JobMonitoringClient.py index 288b89143fe..ecf53fa97b9 100755 --- a/src/DIRAC/WorkloadManagementSystem/Client/JobMonitoringClient.py +++ b/src/DIRAC/WorkloadManagementSystem/Client/JobMonitoringClient.py @@ -1,5 +1,6 @@ """ Class that contains client access to the job monitoring handler. 
""" +import os from DIRAC.Core.Base.Client import Client, createClient from DIRAC.Core.Utilities.DEncode import ignoreEncodeWarning from DIRAC.Core.Utilities.JEncode import strToIntDict @@ -11,6 +12,13 @@ def __init__(self, **kwargs): super().__init__(**kwargs) self.setServer("WorkloadManagement/JobMonitoring") + if os.getenv("DIRAC_ENABLE_DIRACX_JOB_MONITORING", "No").lower() in ("yes", "true"): + from DIRAC.WorkloadManagementSystem.FutureClient.JobMonitoringClient import ( + JobMonitoringClient as futureJobMonitoringClient, + ) + + httpsClient = futureJobMonitoringClient + @ignoreEncodeWarning def getJobsStatus(self, jobIDs): res = self._getRPC().getJobsStatus(jobIDs) diff --git a/src/DIRAC/WorkloadManagementSystem/FutureClient/JobMonitoringClient.py b/src/DIRAC/WorkloadManagementSystem/FutureClient/JobMonitoringClient.py new file mode 100644 index 00000000000..dfbf98c3d54 --- /dev/null +++ b/src/DIRAC/WorkloadManagementSystem/FutureClient/JobMonitoringClient.py @@ -0,0 +1,36 @@ +# pylint: disable=import-error +import os + +if os.getenv("DIRAC_ENABLE_DIRACX_JOB_MONITORING", "No").lower() in ("yes", "true"): + from diracx.client import Dirac + from diracx.client.models import JobSearchParams + + from diracx.cli.utils import get_auth_headers + from diracx.core.preferences import DiracxPreferences + + from DIRAC.Core.Utilities.ReturnValues import convertToReturnValue + + class JobMonitoringClient: + def __init__(self, *args, **kwargs): + self.endpoint = DiracxPreferences().url + + def fetch(self, parameters, jobIDs): + with Dirac(endpoint=self.endpoint) as api: + jobs = api.jobs.search( + parameters=["JobID"] + parameters, + search=[{"parameter": "JobID", "operator": "in", "values": jobIDs}], + headers=get_auth_headers(), + ) + return {j["JobID"]: {param: j[param] for param in parameters} for j in jobs} + + @convertToReturnValue + def getJobsMinorStatus(self, jobIDs): + return self.fetch(["MinorStatus"], jobIDs) + + @convertToReturnValue + def getJobsStates(self, jobIDs): + return self.fetch(["Status", "MinorStatus", "ApplicationStatus"], jobIDs) + + @convertToReturnValue + def getJobsSites(self, jobIDs): + return self.fetch(["Site"], jobIDs) diff --git a/src/DIRAC/WorkloadManagementSystem/FutureClient/__init__.py b/src/DIRAC/WorkloadManagementSystem/FutureClient/__init__.py new file mode 100644 index 00000000000..e69de29bb2d From 1dcaf5f702e9a8a57bf5a6976b1a3a45e41dc998 Mon Sep 17 00:00:00 2001 From: Christophe Haen Date: Fri, 1 Sep 2023 17:19:18 +0200 Subject: [PATCH 2/8] feat (diracx): exchange a proxy for an equivalent token --- .../environment_variable_configuration.rst | 3 ++ .../Service/ProxyManagerHandler.py | 40 +++++++++++++++++++ .../FrameworkSystem/scripts/dirac_login.py | 32 ++++++++++++++- .../scripts/dirac_proxy_init.py | 23 +++++++++++ 4 files changed, 97 insertions(+), 1 deletion(-) diff --git a/docs/source/AdministratorGuide/ServerInstallations/environment_variable_configuration.rst b/docs/source/AdministratorGuide/ServerInstallations/environment_variable_configuration.rst index ab494abfb0d..3361e136d50 100644 --- a/docs/source/AdministratorGuide/ServerInstallations/environment_variable_configuration.rst +++ b/docs/source/AdministratorGuide/ServerInstallations/environment_variable_configuration.rst @@ -21,6 +21,9 @@ DIRAC_DEPRECATED_FAIL DIRAC_ENABLE_DIRACX_JOB_MONITORING If set, calls the diracx job monitoring service. Off by default. 
+DIRAC_ENABLE_DIRACX_LOGIN + If set, retrieve a DiracX token when calling dirac-proxy-init or dirac-login + DIRAC_FEWER_CFG_LOCKS If ``true`` or ``yes`` or ``on`` or ``1`` or ``y`` or ``t``, DIRAC will reduce the number of locks used when accessing the CS for better performance (default, ``no``). diff --git a/src/DIRAC/FrameworkSystem/Service/ProxyManagerHandler.py b/src/DIRAC/FrameworkSystem/Service/ProxyManagerHandler.py index 96e90abbf45..7b50a1f5b04 100644 --- a/src/DIRAC/FrameworkSystem/Service/ProxyManagerHandler.py +++ b/src/DIRAC/FrameworkSystem/Service/ProxyManagerHandler.py @@ -6,6 +6,7 @@ :dedent: 2 :caption: ProxyManager options """ + from DIRAC import gLogger, S_OK, S_ERROR from DIRAC.Core.DISET.RequestHandler import RequestHandler, getServiceOption from DIRAC.Core.Security import Properties @@ -406,6 +407,45 @@ def export_getVOMSProxyWithToken(self, userDN, userGroup, requestPem, requiredLi self.__proxyDB.logAction("download voms proxy with token", credDict["DN"], credDict["group"], userDN, userGroup) return self.__getVOMSProxy(userDN, userGroup, requestPem, requiredLifetime, vomsAttribute, True) + types_exchangeProxyForToken = [] + + def export_exchangeProxyForToken(self): + """Exchange a proxy for an equivalent token to be used with diracx""" + try: + from diracx.routers.auth import ( # pylint: disable=import-error + AuthSettings, + create_access_token, + TokenResponse, + ) # pylint: disable=import-error + + authSettings = AuthSettings() + + from uuid import uuid4 + + credDict = self.getRemoteCredentials() + vo = Registry.getVOForGroup(credDict["group"]) + payload = { + "sub": f"{vo}:{credDict['username']}", + "vo": vo, + "aud": authSettings.token_audience, + "iss": authSettings.token_issuer, + "dirac_properties": list( + set(credDict.get("groupProperties", [])) | set(credDict.get("properties", [])) + ), + "jti": str(uuid4()), + "preferred_username": credDict["username"], + "dirac_group": credDict["group"], + } + return S_OK( + TokenResponse( + access_token=create_access_token(payload, authSettings), + expires_in=authSettings.access_token_expire_minutes * 60, + state="None", + ).dict() + ) + except Exception as e: + return S_ERROR(f"Could not get token: {e!r}") + class ProxyManagerHandler(ProxyManagerHandlerMixin, RequestHandler): pass diff --git a/src/DIRAC/FrameworkSystem/scripts/dirac_login.py b/src/DIRAC/FrameworkSystem/scripts/dirac_login.py index ec1f3a2c7fd..049ee2176e4 100644 --- a/src/DIRAC/FrameworkSystem/scripts/dirac_login.py +++ b/src/DIRAC/FrameworkSystem/scripts/dirac_login.py @@ -16,6 +16,9 @@ import os import sys import copy +import datetime +import json +from pathlib import Path from prompt_toolkit import prompt, print_formatted_text as print, HTML import DIRAC @@ -26,6 +29,13 @@ from DIRAC.Core.Security.ProxyInfo import getProxyInfo, formatProxyInfoAsString from DIRAC.Core.Security.X509Chain import X509Chain # pylint: disable=import-error from DIRAC.Core.Base.Script import Script +from DIRAC.Core.Base.Client import Client + + +# token location +DIRAC_TOKEN_FILE = Path.home() / ".cache" / "diracx" / "credentials.json" +EXPIRES_GRACE_SECONDS = 15 + # At this point, we disable CS synchronization so that an error related # to the lack of a proxy certificate does not occur when trying to synchronize. 
@@ -304,7 +314,27 @@ def loginWithCertificate(self): # Upload proxy to the server if it longer that uploaded one if credentials["secondsLeft"] > uploadedProxyLifetime: gLogger.notice("Upload proxy to server.") - return gProxyManager.uploadProxy(proxy) + res = gProxyManager.uploadProxy(proxy) + if not res["OK"]: + return res + + # Get a token for use with diracx + if os.getenv("DIRAC_ENABLE_DIRACX_LOGIN", "No").lower() in ("yes", "true"): + res = Client(url="Framework/ProxyManager").exchangeProxyForToken() + if not res["OK"]: + return res + DIRAC_TOKEN_FILE.parent.mkdir(parents=True, exist_ok=True) + expires = datetime.datetime.now(tz=datetime.timezone.utc) + datetime.timedelta( + seconds=res["Value"]["expires_in"] - EXPIRES_GRACE_SECONDS + ) + credential_data = { + "access_token": res["Value"]["access_token"], + # TODO: "refresh_token": + # TODO: "refresh_token_expires": + "expires": expires.isoformat(), + } + DIRAC_TOKEN_FILE.write_text(json.dumps(credential_data)) + return S_OK() def __enableCS(self): diff --git a/src/DIRAC/FrameworkSystem/scripts/dirac_proxy_init.py b/src/DIRAC/FrameworkSystem/scripts/dirac_proxy_init.py index cf31864c02a..6fabcace1a9 100755 --- a/src/DIRAC/FrameworkSystem/scripts/dirac_proxy_init.py +++ b/src/DIRAC/FrameworkSystem/scripts/dirac_proxy_init.py @@ -9,6 +9,7 @@ import os import sys import glob +import json import time import datetime @@ -21,6 +22,12 @@ from DIRAC.Core.Security.Locations import getCAsLocation from DIRAC.ConfigurationSystem.Client.Helpers import Registry from DIRAC.FrameworkSystem.Client.BundleDeliveryClient import BundleDeliveryClient +from DIRAC.Core.Base.Client import Client +from pathlib import Path + + +DIRAC_TOKEN_FILE = Path.home() / ".cache" / "diracx" / "credentials.json" +EXPIRES_GRACE_SECONDS = 15 class Params(ProxyGeneration.CLIParams): @@ -237,6 +244,22 @@ def doTheMagic(self): if self.__piParams.strict: return resultProxyUpload + if os.getenv("DIRAC_ENABLE_DIRACX_LOGIN", "No").lower() in ("yes", "true"): + res = Client(url="Framework/ProxyManager").exchangeProxyForToken() + if not res["OK"]: + return res + DIRAC_TOKEN_FILE.parent.mkdir(parents=True, exist_ok=True) + expires = datetime.datetime.now(tz=datetime.timezone.utc) + datetime.timedelta( + seconds=res["Value"]["expires_in"] - EXPIRES_GRACE_SECONDS + ) + credential_data = { + "access_token": res["Value"]["access_token"], + # TODO: "refresh_token": + # TODO: "refresh_token_expires": + "expires": expires.isoformat(), + } + DIRAC_TOKEN_FILE.write_text(json.dumps(credential_data)) + return S_OK() From b5f977e21685d80a86cff05d02ab31cd5b490d64 Mon Sep 17 00:00:00 2001 From: Christophe Haen Date: Fri, 1 Sep 2023 17:23:58 +0200 Subject: [PATCH 3/8] feat (CS): remove the setup when looking for shifter --- src/DIRAC/ConfigurationSystem/Client/CSAPI.py | 22 +++++-------------- 1 file changed, 6 insertions(+), 16 deletions(-) diff --git a/src/DIRAC/ConfigurationSystem/Client/CSAPI.py b/src/DIRAC/ConfigurationSystem/Client/CSAPI.py index 79ad7515c34..2dbab567651 100644 --- a/src/DIRAC/ConfigurationSystem/Client/CSAPI.py +++ b/src/DIRAC/ConfigurationSystem/Client/CSAPI.py @@ -612,25 +612,15 @@ def getOpsSection(): Where is the shifters section? 
""" vo = CSGlobals.getVO() - setup = CSGlobals.getSetup() if vo: - res = gConfig.getSections(f"/Operations/{vo}/{setup}/Shifter") + res = gConfig.getSections(f"/Operations/{vo}/Shifter") if res["OK"]: - return S_OK(f"/Operations/{vo}/{setup}/Shifter") + return S_OK(f"/Operations/{vo}/Shifter") - res = gConfig.getSections(f"/Operations/{vo}/Defaults/Shifter") - if res["OK"]: - return S_OK(f"/Operations/{vo}/Defaults/Shifter") - - else: - res = gConfig.getSections(f"/Operations/{setup}/Shifter") - if res["OK"]: - return S_OK(f"/Operations/{setup}/Shifter") - - res = gConfig.getSections("/Operations/Defaults/Shifter") - if res["OK"]: - return S_OK("/Operations/Defaults/Shifter") + res = gConfig.getSections("/Operations/Defaults/Shifter") + if res["OK"]: + return S_OK("/Operations/Defaults/Shifter") return S_ERROR("No shifter section") @@ -671,7 +661,7 @@ def getOpsSection(): gLogger.info("Adding shifter section") vo = CSGlobals.getVO() if vo: - section = f"/Operations/{vo}/Defaults/Shifter" + section = f"/Operations/{vo}/Shifter" else: section = "/Operations/Defaults/Shifter" res = self.__csMod.createSection(section) From 88c3cdda97be380a2e6603200a8fe3d06be19676 Mon Sep 17 00:00:00 2001 From: Christophe Haen Date: Fri, 1 Sep 2023 17:24:12 +0200 Subject: [PATCH 4/8] feat (diracx): start a diracx container when running the Integration tests --- integration_tests.py | 73 +++++++++++++++++++++++---- tests/CI/check_db_initialized.sh | 22 +++++++++ tests/CI/docker-compose.yml | 85 +++++++++++++++++++++++++++++++- tests/CI/exportCSLoop.sh | 14 ++++++ 4 files changed, 183 insertions(+), 11 deletions(-) create mode 100755 tests/CI/check_db_initialized.sh create mode 100755 tests/CI/exportCSLoop.sh diff --git a/integration_tests.py b/integration_tests.py index 65544fdf85a..e22d30ad79b 100755 --- a/integration_tests.py +++ b/integration_tests.py @@ -29,13 +29,16 @@ "DIRACOSVER": "master", "DIRACOS_TARBALL_PATH": None, "TEST_HTTPS": "Yes", + "TEST_DIRACX": "No", "DIRAC_FEWER_CFG_LOCKS": None, "DIRAC_USE_JSON_ENCODE": None, "INSTALLATION_BRANCH": "", } -DEFAULT_MODULES = { - "DIRAC": Path(__file__).parent.absolute(), -} +DIRACX_OPTIONS = ( + "DIRAC_ENABLE_DIRACX_LOGIN", + "DIRAC_ENABLE_DIRACX_JOB_MONITORING", +) +DEFAULT_MODULES = {"DIRAC": Path(__file__).parent.absolute()} # Static configuration DB_USER = "Dirac" @@ -180,7 +183,7 @@ def destroy(): with _gen_docker_compose(DEFAULT_MODULES) as docker_compose_fn: os.execvpe( "docker-compose", - ["docker-compose", "-f", docker_compose_fn, "down", "--remove-orphans", "-t", "0"], + ["docker-compose", "-f", docker_compose_fn, "down", "--remove-orphans", "-t", "0", "--volumes"], _make_env({}), ) @@ -193,7 +196,6 @@ def prepare_environment( release_var: Optional[str] = None, ): """Prepare the local environment for installing DIRAC.""" - _check_containers_running(is_up=False) if editable is None: editable = sys.stdout.isatty() @@ -224,7 +226,7 @@ def prepare_environment( typer.secho("Running docker-compose to create containers", fg=c.GREEN) with _gen_docker_compose(modules) as docker_compose_fn: subprocess.run( - ["docker-compose", "-f", docker_compose_fn, "up", "-d"], + ["docker-compose", "-f", docker_compose_fn, "up", "-d", "dirac-server", "dirac-client"], check=True, env=docker_compose_env, ) @@ -313,6 +315,27 @@ def prepare_environment( ) subprocess.run(command, check=True, shell=True) + docker_compose_fn_final = Path(tempfile.mkdtemp()) / "ci" + typer.secho("Running docker-compose to create DiracX containers", fg=c.GREEN) + typer.secho(f"Will eave a folder 
behind: {docker_compose_fn_final}", fg=c.YELLOW)
+
+    with _gen_docker_compose(modules) as docker_compose_fn:
+        # We cannot use the temporary directory created in the context manager because
+        # we don't stay in the context manager (Popen)
+        # So we need something that outlives it.
+        shutil.copytree(docker_compose_fn.parent, docker_compose_fn_final, dirs_exist_ok=True)
+        # We use Popen because we don't want to wait for this command to finish.
+        # It is going to start all the diracx containers, including one which waits
+        # for the DIRAC installation to be over.
+        subprocess.Popen(
+            ["docker-compose", "-f", docker_compose_fn_final / "docker-compose.yml", "up", "-d", "diracx"],
+            env=docker_compose_env,
+            stdin=None,
+            stdout=None,
+            stderr=None,
+            close_fds=True,
+        )
+
 
 @app.command()
 def install_server():
@@ -326,6 +349,15 @@ def install_server():
         check=True,
     )
 
+    # This runs a continuous loop that exports the config in yaml
+    # for the diracx container to use
+    typer.secho("Starting configuration export loop for diracx", fg=c.GREEN)
+    base_cmd = _build_docker_cmd("server", tty=False, daemon=True)
+    subprocess.run(
+        base_cmd + ["bash", "/home/dirac/LocalRepo/ALTERNATIVE_MODULES/DIRAC/tests/CI/exportCSLoop.sh"],
+        check=True,
+    )
+
     typer.secho("Copying credentials and certificates", fg=c.GREEN)
     base_cmd = _build_docker_cmd("client", tty=False)
     subprocess.run(
@@ -508,13 +540,24 @@ def _gen_docker_compose(modules):
     # Load the docker-compose configuration and mount the necessary volumes
     input_fn = Path(__file__).parent / "tests/CI/docker-compose.yml"
     docker_compose = yaml.safe_load(input_fn.read_text())
+    # diracx-wait-for-db needs the volume to be able to run the waiting script
+    for ctn in ("dirac-server", "dirac-client", "diracx-wait-for-db"):
+        if "volumes" not in docker_compose["services"][ctn]:
+            docker_compose["services"][ctn]["volumes"] = []
     volumes = [f"{path}:/home/dirac/LocalRepo/ALTERNATIVE_MODULES/{name}" for name, path in modules.items()]
     volumes += [f"{path}:/home/dirac/LocalRepo/TestCode/{name}" for name, path in modules.items()]
-    docker_compose["services"]["dirac-server"]["volumes"] = volumes[:]
-    docker_compose["services"]["dirac-client"]["volumes"] = volumes[:]
+    docker_compose["services"]["dirac-server"]["volumes"].extend(volumes[:])
+    docker_compose["services"]["dirac-client"]["volumes"].extend(volumes[:])
+    docker_compose["services"]["diracx-wait-for-db"]["volumes"].extend(volumes[:])
+
+    module_configs = _load_module_configs(modules)
+    if "diracx" in module_configs:
+        docker_compose["services"]["diracx"]["volumes"].append(
+            f"{modules['diracx']}/src/diracx:{module_configs['diracx']['install-location']}"
+        )
 
     # Add any extension services
-    for module_name, module_configs in _load_module_configs(modules).items():
+    for module_name, module_configs in module_configs.items():
         for service_name, service_config in module_configs["extra-services"].items():
             typer.secho(f"Adding service {service_name} for {module_name}", err=True, fg=c.GREEN)
             docker_compose["services"][service_name] = service_config.copy()
@@ -981,6 +1024,8 @@ def _make_config(modules, flags, release_var, editable):
         "CLIENT_HOST": "client",
         # Test specific variables
         "WORKSPACE": "/home/dirac",
+        # DiracX variable
+        "DIRACX_URL": "http://diracx:8000",
     }
 
     if editable:
@@ -1006,6 +1051,12 @@ def _make_config(modules, flags, release_var, editable):
         except KeyError:
             typer.secho(f"Required feature variable {key!r} is missing", err=True, fg=c.RED)
             raise typer.Exit(code=1)
+
+    # If we test DiracX, enable all the options
+ 
if config["TEST_DIRACX"].lower() in ("yes", "true"): + for key in DIRACX_OPTIONS: + config[key] = "Yes" + config["TESTREPO"] = [f"/home/dirac/LocalRepo/TestCode/{name}" for name in modules] config["ALTERNATIVE_MODULES"] = [f"/home/dirac/LocalRepo/ALTERNATIVE_MODULES/{name}" for name in modules] @@ -1027,7 +1078,7 @@ def _load_module_configs(modules): return module_ci_configs -def _build_docker_cmd(container_name, *, use_root=False, cwd="/home/dirac", tty=True): +def _build_docker_cmd(container_name, *, use_root=False, cwd="/home/dirac", tty=True, daemon=False): if use_root or os.getuid() == 0: user = "root" else: @@ -1042,6 +1093,8 @@ def _build_docker_cmd(container_name, *, use_root=False, cwd="/home/dirac", tty= err=True, fg=c.YELLOW, ) + if daemon: + cmd += ["-d"] cmd += [ "-e=TERM=xterm-color", "-e=INSTALLROOT=/home/dirac", diff --git a/tests/CI/check_db_initialized.sh b/tests/CI/check_db_initialized.sh new file mode 100755 index 00000000000..96cc47e6016 --- /dev/null +++ b/tests/CI/check_db_initialized.sh @@ -0,0 +1,22 @@ +#!/bin/bash +dbMissing=true; +allDBs=(JobDB FileCatalogDB) +while ${dbMissing}; +do + dbMissing=false; + allExistingDBs=$(mysql -uDirac -pDirac -h mysql -P 3306 -e "show databases;"); + for db in "${allDBs[@]}"; + do + if grep -q "${db}" <<< "${allExistingDBs}"; + then + echo "${db} OK"; + else + echo "${db} not created"; + dbMissing=true; + fi; + done; + if ${dbMissing}; + then + sleep 1; + fi +done diff --git a/tests/CI/docker-compose.yml b/tests/CI/docker-compose.yml index e3de9efb188..c4d344b2cb7 100644 --- a/tests/CI/docker-compose.yml +++ b/tests/CI/docker-compose.yml @@ -1,5 +1,11 @@ version: '3.4' +volumes: + # Volume used to store the config of diracx + diracx-cs-store: + # Volume used to store the pair of keys to sign the tokens + diracx-key-store: + services: mysql: image: ${MYSQL_VER} @@ -42,7 +48,6 @@ services: retries: 15 start_period: 60s - # Mock of an S3 storage s3-direct: image: adobe/s3mock @@ -55,18 +60,45 @@ services: - initialBuckets=my-first-bucket - debug=true + + diracx-wait-for-db: + + image: ${MYSQL_VER} + container_name: diracx-wait-for-db + depends_on: + mysql: + condition: service_healthy + command: /home/dirac/LocalRepo/ALTERNATIVE_MODULES/DIRAC/tests/CI/check_db_initialized.sh + + + dirac-server: image: ${CI_REGISTRY_IMAGE}/${HOST_OS}-dirac container_name: server hostname: server user: "${DIRAC_UID}:${DIRAC_GID}" + depends_on: mysql: condition: service_healthy elasticsearch: condition: service_healthy + s3-direct: + condition: service_started + iam-login-service: + condition: service_started + diracx-init-key: + condition: service_completed_successfully # Let the init container create the cs + diracx-init-cs: + condition: service_completed_successfully # Let the init container create the cs ulimits: nofile: 8192 + volumes: + - diracx-cs-store:/cs_store + - diracx-key-store:/signing-key + environment: + - DIRACX_CONFIG_BACKEND_URL=git+file:///cs_store/initialRepo + - DIRACX_SERVICE_AUTH_TOKEN_KEY=file:///signing-key/rs256.key dirac-client: image: ${CI_REGISTRY_IMAGE}/${HOST_OS}-dirac @@ -77,3 +109,54 @@ services: - dirac-server ulimits: nofile: 8192 + + + + diracx-init-key: + image: ghcr.io/diracgrid/diracx/server + container_name: diracx-init-key + environment: + - DIRACX_SERVICE_AUTH_TOKEN_KEY="file:///signing-key/rs256.key" + volumes: + - diracx-key-store:/signing-key/ + # We need to allow everybody to read the private keys + # Because the users are different between the DIRAC and DiracX containers + entrypoint: | + 
/dockerMicroMambaEntrypoint.sh bash -c "ssh-keygen -P '' -trsa -b4096 -mPEM -f/signing-key/rs256.key && /dockerMicroMambaEntrypoint.sh chmod o+r /signing-key/rs256.*" + + diracx-init-cs: + image: ghcr.io/diracgrid/diracx/server + container_name: diracx-init-cs + environment: + - DIRACX_CONFIG_BACKEND_URL=git+file:///cs_store/initialRepo + - DIRACX_SERVICE_AUTH_TOKEN_KEY=file:///signing-key/rs256.key + volumes: + - diracx-cs-store:/cs_store/ + - diracx-key-store:/signing-key/ + entrypoint: | + /dockerMicroMambaEntrypoint.sh dirac internal generate-cs /cs_store/initialRepo --vo=diracAdmin --user-group=admin --idp-url=http://dsdsd.csds/a/b + + diracx: + image: ghcr.io/diracgrid/diracx/server + container_name: diracx + environment: + - DIRACX_CONFIG_BACKEND_URL=git+file:///cs_store/initialRepo + - "DIRACX_DB_URL_AUTHDB=sqlite+aiosqlite:///:memory:" + - DIRACX_DB_URL_JOBDB=mysql+aiomysql://Dirac:Dirac@mysql/JobDB + - DIRACX_SERVICE_AUTH_TOKEN_KEY=file:///signing-key/rs256.key + - DIRACX_SERVICE_AUTH_ALLOWED_REDIRECTS=["http://diracx:8000/docs/oauth2-redirect"] + ports: + - 8000:8000 + depends_on: + diracx-wait-for-db: + condition: service_completed_successfully + volumes: + - diracx-cs-store:/cs_store/ + - diracx-key-store:/signing-key/ + + healthcheck: + test: ["CMD", "/dockerMicroMambaEntrypoint.sh", "curl", "-f", "http://localhost:8000/.well-known/openid-configuration"] + interval: 5s + timeout: 2s + retries: 15 + start_period: 60s diff --git a/tests/CI/exportCSLoop.sh b/tests/CI/exportCSLoop.sh new file mode 100755 index 00000000000..c5eb5132ad8 --- /dev/null +++ b/tests/CI/exportCSLoop.sh @@ -0,0 +1,14 @@ +#!/usr/bin/env bash +# This script will export to the `Production.cfg` file to the +# yaml format for diracx every 5 seconds + +source /home/dirac/ServerInstallDIR/bashrc +git config --global user.name "DIRAC Server CI" +git config --global user.email "dirac-server-ci@invalid" + +while true; +do + curl -L https://gitlab.cern.ch/chaen/chris-hackaton-cs/-/raw/master/convert-from-legacy.py |DIRAC_COMPAT_ENABLE_CS_CONVERSION=True ~/ServerInstallDIR/diracos/bin/python - ~/ServerInstallDIR/etc/Production.cfg /cs_store/initialRepo/ + git -C /cs_store/initialRepo/ commit -am "export $(date)" + sleep 5; +done From e35067099c4ad08781a5c9aa72eb813b93ab21df Mon Sep 17 00:00:00 2001 From: Christophe Haen Date: Tue, 5 Sep 2023 14:50:30 +0200 Subject: [PATCH 5/8] refactor (Operations): remove the Setup subsections --- .../Client/Helpers/Operations.py | 86 +++---------------- 1 file changed, 10 insertions(+), 76 deletions(-) diff --git a/src/DIRAC/ConfigurationSystem/Client/Helpers/Operations.py b/src/DIRAC/ConfigurationSystem/Client/Helpers/Operations.py index bf0f791b208..8e06d7c2113 100644 --- a/src/DIRAC/ConfigurationSystem/Client/Helpers/Operations.py +++ b/src/DIRAC/ConfigurationSystem/Client/Helpers/Operations.py @@ -6,56 +6,19 @@ Operations/ Defaults/ + someOption = someValue + aSecondOption = aSecondValue + specificVo/ someSection/ - someOption = someValue - aSecondOption = aSecondValue - Production/ - someSection/ - someOption = someValueInProduction - aSecondOption = aSecondValueInProduction - Certification/ - someSection/ - someOption = someValueInCertification + someOption = someValueInVO The following calls would give different results based on the setup:: Operations().getValue('someSection/someOption') - - someValueInProduction if we are in 'Production' setup - - someValueInCertification if we are in 'Certification' setup - - Operations().getValue('someSection/aSecondOption') - - 
aSecondValueInProduction if we are in 'Production' setup - - aSecondValue if we are in 'Certification' setup <- looking in Defaults - since there's no Certification/someSection/aSecondOption - + - someValueInVO if we are in 'specificVo' vo + - someValue if we are in any other VO - At the same time, for multi-VO installations, it is also possible to specify different options per-VO, - like the following:: - - Operations/ - aVOName/ - Defaults/ - someSection/ - someOption = someValue - aSecondOption = aSecondValue - Production/ - someSection/ - someOption = someValueInProduction - aSecondOption = aSecondValueInProduction - Certification/ - someSection/ - someOption = someValueInCertification - anotherVOName/ - Defaults/ - someSectionName/ - someOptionX = someValueX - aSecondOption = aSecondValue - setupName/ - someSection/ - someOption = someValueInProduction - aSecondOption = aSecondValueInProduction - - For this case it becomes then important for the Operations() objects to know the VO name + It becomes then important for the Operations() objects to know the VO name for which we want the information, and this can be done in the following ways. 1. by specifying the VO name directly:: @@ -98,9 +61,7 @@ def __init__(self, vo=False, group=False, setup=False): """ self.__uVO = vo self.__uGroup = group - self.__uSetup = setup self.__vo = False - self.__setup = False self.__discoverSettings() def __discoverSettings(self): @@ -119,12 +80,6 @@ def __discoverSettings(self): result = getVOfromProxyGroup() if result["OK"]: self.__vo = result["Value"] - # Set the setup - self.__setup = False - if self.__uSetup: - self.__setup = self.__uSetup - else: - self.__setup = CSGlobals.getSetup() def __getCache(self): Operations.__cacheLock.acquire() @@ -134,7 +89,7 @@ def __getCache(self): Operations.__cache = {} Operations.__cacheVersion = currentVersion - cacheKey = (self.__vo, self.__setup) + cacheKey = (self.__vo,) if cacheKey in Operations.__cache: return Operations.__cache[cacheKey] @@ -155,14 +110,13 @@ def __getCache(self): pass def __getSearchPaths(self): - paths = ["/Operations/Defaults", f"/Operations/{self.__setup}"] + paths = ["/Operations/Defaults"] if not self.__vo: globalVO = CSGlobals.getVO() if not globalVO: return paths self.__vo = CSGlobals.getVO() - paths.append(f"/Operations/{self.__vo}/Defaults") - paths.append(f"/Operations/{self.__vo}/{self.__setup}") + paths.append(f"/Operations/{self.__vo}/") return paths def getValue(self, optionPath, defaultValue=None): @@ -202,26 +156,6 @@ def getOptionsDict(self, sectionPath): data[opName] = sectionCFG[opName] return S_OK(data) - def getPath(self, option, vo=False, setup=False): - """ - Generate the CS path for an option: - - - if vo is not defined, the helper's vo will be used for multi VO installations - - if setup evaluates False (except None) -> The helpers setup will be used - - if setup is defined -> whatever is defined will be used as setup - - if setup is None -> Defaults will be used - - :param option: path with respect to the Operations standard path - :type option: string - """ - - for path in self.__getSearchPaths(): - optionPath = os.path.join(path, option) - value = gConfig.getValue(optionPath, "NoValue") - if value != "NoValue": - return optionPath - return "" - def getMonitoringBackends(self, monitoringType=None): """ Chooses the type of backend to use (Monitoring and/or Accounting) depending on the MonitoringType. 
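
A minimal usage sketch of the flattened layout above (illustrative only, not part of the patch; it assumes an initialised DIRAC client environment and reuses the hypothetical option names from the docstring example, with the documented precedence: the VO-specific section overrides /Operations/Defaults):

    from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations

    # With the Setup level gone, only two sections are searched:
    # /Operations/Defaults and /Operations/<vo>
    ops = Operations(vo="specificVo")
    ops.getValue("someSection/someOption")     # -> "someValueInVO" (the VO-specific value wins)
    ops.getValue("aSecondOption", "fallback")  # -> "aSecondValue" (taken from Defaults)
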
From dc011c2e443d31ce6fb8dbe65461762bfa0f2863 Mon Sep 17 00:00:00 2001 From: Christophe Haen Date: Tue, 5 Sep 2023 14:54:25 +0200 Subject: [PATCH 6/8] docs: mock diracx when building docs --- docs/diracdoctools/__init__.py | 1 + .../FutureClient/JobMonitoringClient.py | 52 +++++++++---------- 2 files changed, 26 insertions(+), 27 deletions(-) diff --git a/docs/diracdoctools/__init__.py b/docs/diracdoctools/__init__.py index 5bc09f91ae7..5fb377db8fd 100644 --- a/docs/diracdoctools/__init__.py +++ b/docs/diracdoctools/__init__.py @@ -9,6 +9,7 @@ "_arc", "arc", "cmreslogging", + "diracx", "fts3", "gfal2", "git", diff --git a/src/DIRAC/WorkloadManagementSystem/FutureClient/JobMonitoringClient.py b/src/DIRAC/WorkloadManagementSystem/FutureClient/JobMonitoringClient.py index dfbf98c3d54..5759a8e7fed 100644 --- a/src/DIRAC/WorkloadManagementSystem/FutureClient/JobMonitoringClient.py +++ b/src/DIRAC/WorkloadManagementSystem/FutureClient/JobMonitoringClient.py @@ -1,36 +1,34 @@ # pylint: disable=import-error -import os +from diracx.client import Dirac +from diracx.client.models import JobSearchParams -if os.getenv("DIRAC_ENABLE_DIRACX_JOB_MONITORING", "No").lower() in ("yes", "true"): - from diracx.client import Dirac - from diracx.client.models import JobSearchParams +from diracx.cli.utils import get_auth_headers +from diracx.core.preferences import DiracxPreferences - from diracx.cli.utils import get_auth_headers - from diracx.core.preferences import DiracxPreferences +from DIRAC.Core.Utilities.ReturnValues import convertToReturnValue - from DIRAC.Core.Utilities.ReturnValues import convertToReturnValue - class JobMonitoringClient: - def __init__(self, *args, **kwargs): - self.endpoint = DiracxPreferences().url +class JobMonitoringClient: + def __init__(self, *args, **kwargs): + self.endpoint = DiracxPreferences().url - def fetch(self, parameters, jobIDs): - with Dirac(endpoint=self.endpoint) as api: - jobs = api.jobs.search( - parameters=["JobID"] + parameters, - search=[{"parameter": "JobID", "operator": "in", "values": jobIDs}], - headers=get_auth_headers(), - ) - return {j["JobID"]: {param: j[param] for param in parameters} for j in jobs} + def fetch(self, parameters, jobIDs): + with Dirac(endpoint=self.endpoint) as api: + jobs = api.jobs.search( + parameters=["JobID"] + parameters, + search=[{"parameter": "JobID", "operator": "in", "values": jobIDs}], + headers=get_auth_headers(), + ) + return {j["JobID"]: {param: j[param] for param in parameters} for j in jobs} - @convertToReturnValue - def getJobsMinorStatus(self, jobIDs): - return self.fetch(["MinorStatus"], jobIDs) + @convertToReturnValue + def getJobsMinorStatus(self, jobIDs): + return self.fetch(["MinorStatus"], jobIDs) - @convertToReturnValue - def getJobsStates(self, jobIDs): - return self.fetch(["Status", "MinorStatus", "ApplicationStatus"], jobIDs) + @convertToReturnValue + def getJobsStates(self, jobIDs): + return self.fetch(["Status", "MinorStatus", "ApplicationStatus"], jobIDs) - @convertToReturnValue - def getJobsSites(self, jobIDs): - return self.fetch(["Site"], jobIDs) + @convertToReturnValue + def getJobsSites(self, jobIDs): + return self.fetch(["Site"], jobIDs) From 4cf9dec5a70cf7052c0f1a21656a4242b06a8c79 Mon Sep 17 00:00:00 2001 From: Christophe Haen Date: Tue, 5 Sep 2023 15:02:21 +0200 Subject: [PATCH 7/8] refactor: take diracx token path from diracx --- integration_tests.py | 2 +- src/DIRAC/FrameworkSystem/scripts/dirac_login.py | 12 +++++------- .../FrameworkSystem/scripts/dirac_proxy_init.py | 11 +++++------ 3 files 
changed, 11 insertions(+), 14 deletions(-) diff --git a/integration_tests.py b/integration_tests.py index e22d30ad79b..c8fda8ed758 100755 --- a/integration_tests.py +++ b/integration_tests.py @@ -317,7 +317,7 @@ def prepare_environment( docker_compose_fn_final = Path(tempfile.mkdtemp()) / "ci" typer.secho("Running docker-compose to create DiracX containers", fg=c.GREEN) - typer.secho(f"Will eave a folder behind: {docker_compose_fn_final}", fg=c.YELLOW) + typer.secho(f"Will leave a folder behind: {docker_compose_fn_final}", fg=c.YELLOW) with _gen_docker_compose(modules) as docker_compose_fn: # We cannot use the temporary directory created in the context manager because diff --git a/src/DIRAC/FrameworkSystem/scripts/dirac_login.py b/src/DIRAC/FrameworkSystem/scripts/dirac_login.py index 049ee2176e4..99dc0fbe09e 100644 --- a/src/DIRAC/FrameworkSystem/scripts/dirac_login.py +++ b/src/DIRAC/FrameworkSystem/scripts/dirac_login.py @@ -32,11 +32,6 @@ from DIRAC.Core.Base.Client import Client -# token location -DIRAC_TOKEN_FILE = Path.home() / ".cache" / "diracx" / "credentials.json" -EXPIRES_GRACE_SECONDS = 15 - - # At this point, we disable CS synchronization so that an error related # to the lack of a proxy certificate does not occur when trying to synchronize. # Synchronization will take place after passing the authorization algorithm (creating a proxy). @@ -320,10 +315,13 @@ def loginWithCertificate(self): # Get a token for use with diracx if os.getenv("DIRAC_ENABLE_DIRACX_LOGIN", "No").lower() in ("yes", "true"): + from diracx.cli import EXPIRES_GRACE_SECONDS # pylint: disable=import-error + from diracx.cli.utils import CREDENTIALS_PATH # pylint: disable=import-error + res = Client(url="Framework/ProxyManager").exchangeProxyForToken() if not res["OK"]: return res - DIRAC_TOKEN_FILE.parent.mkdir(parents=True, exist_ok=True) + CREDENTIALS_PATH.parent.mkdir(parents=True, exist_ok=True) expires = datetime.datetime.now(tz=datetime.timezone.utc) + datetime.timedelta( seconds=res["Value"]["expires_in"] - EXPIRES_GRACE_SECONDS ) @@ -333,7 +331,7 @@ def loginWithCertificate(self): # TODO: "refresh_token_expires": "expires": expires.isoformat(), } - DIRAC_TOKEN_FILE.write_text(json.dumps(credential_data)) + CREDENTIALS_PATH.write_text(json.dumps(credential_data)) return S_OK() diff --git a/src/DIRAC/FrameworkSystem/scripts/dirac_proxy_init.py b/src/DIRAC/FrameworkSystem/scripts/dirac_proxy_init.py index 6fabcace1a9..4656024e280 100755 --- a/src/DIRAC/FrameworkSystem/scripts/dirac_proxy_init.py +++ b/src/DIRAC/FrameworkSystem/scripts/dirac_proxy_init.py @@ -26,10 +26,6 @@ from pathlib import Path -DIRAC_TOKEN_FILE = Path.home() / ".cache" / "diracx" / "credentials.json" -EXPIRES_GRACE_SECONDS = 15 - - class Params(ProxyGeneration.CLIParams): addVOMSExt = False uploadProxy = True @@ -245,10 +241,13 @@ def doTheMagic(self): return resultProxyUpload if os.getenv("DIRAC_ENABLE_DIRACX_LOGIN", "No").lower() in ("yes", "true"): + from diracx.cli import EXPIRES_GRACE_SECONDS # pylint: disable=import-error + from diracx.cli.utils import CREDENTIALS_PATH # pylint: disable=import-error + res = Client(url="Framework/ProxyManager").exchangeProxyForToken() if not res["OK"]: return res - DIRAC_TOKEN_FILE.parent.mkdir(parents=True, exist_ok=True) + CREDENTIALS_PATH.parent.mkdir(parents=True, exist_ok=True) expires = datetime.datetime.now(tz=datetime.timezone.utc) + datetime.timedelta( seconds=res["Value"]["expires_in"] - EXPIRES_GRACE_SECONDS ) @@ -258,7 +257,7 @@ def doTheMagic(self): # TODO: "refresh_token_expires": 
"expires": expires.isoformat(), } - DIRAC_TOKEN_FILE.write_text(json.dumps(credential_data)) + CREDENTIALS_PATH.write_text(json.dumps(credential_data)) return S_OK() From 973bb727fbc0fd5af6a8b460925c99187172a854 Mon Sep 17 00:00:00 2001 From: Christophe Haen Date: Tue, 5 Sep 2023 15:09:48 +0200 Subject: [PATCH 8/8] test (diracx): waits for more DB to be ready --- tests/CI/check_db_initialized.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/CI/check_db_initialized.sh b/tests/CI/check_db_initialized.sh index 96cc47e6016..29d7b9f4af5 100755 --- a/tests/CI/check_db_initialized.sh +++ b/tests/CI/check_db_initialized.sh @@ -1,6 +1,6 @@ #!/bin/bash dbMissing=true; -allDBs=(JobDB FileCatalogDB) +allDBs=(AccountingDB FTS3DB JobDB JobLoggingDB PilotAgentsDB ProductionDB ProxyDB ReqDB ResourceManagementDB ResourceStatusDB SandboxMetadataDB StorageManagementDB TaskQueueDB TransformationDB) while ${dbMissing}; do dbMissing=false;