
Tekton Module

The tekton module provides functions for managing Tekton pipelines in OpenShift.


Functions

installOpenShiftPipelines(dynClient, customStorageClassName=None)

Install the OpenShift Pipelines Operator and wait for it to be ready to use.

Creates the operator subscription, waits for the CRD and webhook to be ready, and handles PVC storage class configuration if needed.

Parameters:

    dynClient (DynamicClient): OpenShift Dynamic Client
    customStorageClassName (str, optional): Custom storage class name for the Tekton PVC. Defaults to None.

Returns:

    bool: True if installation is successful, False otherwise

Raises:

    NotFoundError: If the package manifest is not found
    UnprocessibleEntityError: If the subscription cannot be created

Source code in src/mas/devops/tekton.py
def installOpenShiftPipelines(dynClient: DynamicClient, customStorageClassName: str = None) -> bool:
    """
    Install the OpenShift Pipelines Operator and wait for it to be ready to use.

    Creates the operator subscription, waits for the CRD and webhook to be ready,
    and handles PVC storage class configuration if needed.

    Parameters:
        dynClient (DynamicClient): OpenShift Dynamic Client
        customStorageClassName (str, optional): Custom storage class name for Tekton PVC. Defaults to None.

    Returns:
        bool: True if installation is successful, False otherwise

    Raises:
        NotFoundError: If the package manifest is not found
        UnprocessibleEntityError: If the subscription cannot be created
    """
    packagemanifestAPI = dynClient.resources.get(api_version="packages.operators.coreos.com/v1", kind="PackageManifest")
    subscriptionsAPI = dynClient.resources.get(api_version="operators.coreos.com/v1alpha1", kind="Subscription")

    # Create the Operator Subscription
    try:
        if not crdExists(dynClient, "pipelines.tekton.dev"):
            manifest = packagemanifestAPI.get(name="openshift-pipelines-operator-rh", namespace="openshift-marketplace")
            defaultChannel = manifest.status.defaultChannel
            catalogSource = manifest.status.catalogSource
            catalogSourceNamespace = manifest.status.catalogSourceNamespace

            logger.info(f"OpenShift Pipelines Operator Details: {catalogSourceNamespace}/{catalogSource}@{defaultChannel}")

            templateDir = path.join(path.abspath(path.dirname(__file__)), "templates")
            env = Environment(
                loader=FileSystemLoader(searchpath=templateDir)
            )
            template = env.get_template("subscription.yml.j2")
            renderedTemplate = template.render(
                subscription_name="openshift-pipelines-operator",
                subscription_namespace="openshift-operators",
                package_name="openshift-pipelines-operator-rh",
                package_channel=defaultChannel,
                catalog_name=catalogSource,
                catalog_namespace=catalogSourceNamespace
            )
            subscription = yaml.safe_load(renderedTemplate)
            subscriptionsAPI.apply(body=subscription, namespace="openshift-operators")

    except NotFoundError:
        logger.warning("Error: Couldn't find package manifest for Red Hat Openshift Pipelines Operator")
    except UnprocessibleEntityError:
        logger.warning("Error: Couldn't create/update OpenShift Pipelines Operator Subscription")

    # Wait for the CRD to be available
    logger.debug("Waiting for tasks.tekton.dev CRD to be available")
    foundReadyCRD = waitForCRD(dynClient, "tasks.tekton.dev")
    if foundReadyCRD:
        logger.info("OpenShift Pipelines Operator is installed and ready")
    else:
        logger.error("OpenShift Pipelines Operator is NOT installed and ready")
        return False

    # Wait for the webhook to be ready
    logger.debug("Waiting for tekton-pipelines-webhook Deployment to be ready")
    foundReadyWebhook = waitForDeployment(dynClient, namespace="openshift-pipelines", deploymentName="tekton-pipelines-webhook")
    if foundReadyWebhook:
        logger.info("OpenShift Pipelines Webhook is installed and ready")
    else:
        logger.error("OpenShift Pipelines Webhook is NOT installed and ready")
        return False

    # Workaround for bug in OpenShift Pipelines/Tekton
    # -------------------------------------------------------------------------
    # Wait for the postgredb-tekton-results-postgres-0 PVC to be ready.
    # This PVC doesn't come up when there is no default storage class in the cluster,
    # leaving it in a Pending state and the tekton-results-postgres StatefulSet pending too.
    # Because these resources never come up, the MAS pre-install check in the pipeline
    # times out while checking the health of this StatefulSet, failing the pipeline.
    # Refer to https://github.com/ibm-mas/cli/issues/1511
    logger.debug("Checking postgredb-tekton-results-postgres-0 PVC status")

    pvcAPI = dynClient.resources.get(api_version="v1", kind="PersistentVolumeClaim")
    pvcName = "postgredb-tekton-results-postgres-0"
    pvcNamespace = "openshift-pipelines"

    # Wait briefly for PVC to be created (max 5 minutes)
    maxInitialRetries = 60
    pvc = None
    for retry in range(maxInitialRetries):
        try:
            pvc = pvcAPI.get(name=pvcName, namespace=pvcNamespace)
            break
        except NotFoundError:
            if retry < maxInitialRetries - 1:
                logger.debug(f"Waiting 5s for PVC {pvcName} to be created (attempt {retry + 1}/{maxInitialRetries})...")
                sleep(5)

    if pvc is None:
        logger.error(f"PVC {pvcName} was not created after {maxInitialRetries * 5} seconds (5 minutes)")
        return False

    # Check if PVC is already bound
    if pvc.status.phase == "Bound":
        logger.info("OpenShift Pipelines postgres PVC is already bound and ready")
        return True

    # Check if PVC is pending without a storage class - needs immediate patching
    if pvc.status.phase == "Pending" and pvc.spec.storageClassName is None:
        logger.info("PVC is pending without storage class, attempting to patch immediately...")
        tektonPVCisReady = addMissingStorageClassToTektonPVC(
            dynClient=dynClient,
            namespace=pvcNamespace,
            pvcName=pvcName,
            storageClassName=customStorageClassName
        )
        if tektonPVCisReady:
            logger.info("OpenShift Pipelines postgres is installed and ready")
            return True
        else:
            logger.error("OpenShift Pipelines postgres PVC is NOT ready after patching")
            return False

    # PVC exists with storage class but not bound yet - wait for it to bind
    logger.debug(f"PVC has storage class '{pvc.spec.storageClassName}', waiting for it to be bound...")
    foundReadyPVC = waitForPVC(dynClient, namespace=pvcNamespace, pvcName=pvcName)
    if foundReadyPVC:
        logger.info("OpenShift Pipelines postgres is installed and ready")
        return True
    else:
        logger.error("OpenShift Pipelines postgres PVC is NOT ready")
        return False
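
Example usage, as a minimal sketch: the DynamicClient construction uses the standard kubernetes Python client, and the storage class name is illustrative.

from kubernetes import config
from kubernetes.dynamic import DynamicClient

from mas.devops.tekton import installOpenShiftPipelines

# Build a dynamic client from the active kubeconfig context
dynClient = DynamicClient(config.new_client_from_config())

# "ibmc-block-gold" is an illustrative storage class name; omit the argument
# to let the workaround auto-select the first available storage class if the
# results PVC is stuck in Pending
if installOpenShiftPipelines(dynClient, customStorageClassName="ibmc-block-gold"):
    print("OpenShift Pipelines is installed and ready")
else:
    print("OpenShift Pipelines installation failed")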

addMissingStorageClassToTektonPVC(dynClient, namespace, pvcName, storageClassName=None)

OpenShift Pipelines has a problem when there is no default storage class defined in a cluster. This function patches the PVC used to store pipeline results to add a specific storage class to the PVC spec, then waits for the PVC to be bound.

Parameters:

    dynClient (DynamicClient): Kubernetes client, required to work with the PVC
    namespace (str): Namespace where OpenShift Pipelines is installed
    pvcName (str): Name of the PVC that we want to fix
    storageClassName (str, optional): Name of the storage class that we want the PVC to reference; auto-selected if not provided

Returns:

    bool: True if the PVC is successfully patched and bound, False otherwise

Source code in src/mas/devops/tekton.py
def addMissingStorageClassToTektonPVC(dynClient: DynamicClient, namespace: str, pvcName: str, storageClassName: str = None) -> bool:
    """
    OpenShift Pipelines has a problem when there is no default storage class defined in a cluster. This function
    patches the PVC used to store pipeline results to add a specific storage class to the PVC spec, then waits for
    the PVC to be bound.

    :param dynClient: Kubernetes client, required to work with PVC
    :type dynClient: DynamicClient
    :param namespace: Namespace where OpenShift Pipelines is installed
    :type namespace: str
    :param pvcName: Name of the PVC that we want to fix
    :type pvcName: str
    :param storageClassName: Name of the storage class that we want to update the PVC to reference (optional, will auto-select if not provided)
    :type storageClassName: str
    :return: True if PVC is successfully patched and bound, False otherwise
    :rtype: bool
    """
    pvcAPI = dynClient.resources.get(api_version="v1", kind="PersistentVolumeClaim")
    storageClassAPI = dynClient.resources.get(api_version="storage.k8s.io/v1", kind="StorageClass")

    try:
        pvc = pvcAPI.get(name=pvcName, namespace=namespace)

        # Check if PVC is pending and has no storage class
        if pvc.status.phase == "Pending" and pvc.spec.storageClassName is None:
            # Determine which storage class to use
            targetStorageClass = None

            if storageClassName is not None:
                # Verify the provided storage class exists
                try:
                    storageClassAPI.get(name=storageClassName)
                    targetStorageClass = storageClassName
                    logger.info(f"Using provided storage class '{storageClassName}' for PVC {pvcName}")
                except NotFoundError:
                    logger.warning(f"Provided storage class '{storageClassName}' not found, will try to detect available storage class")

            # If no valid custom storage class, try to detect one
            if targetStorageClass is None:
                logger.warning("No storage class provided or provided storage class not found, attempting to use first available storage class")
                storageClasses = getStorageClasses(dynClient)
                if len(storageClasses) > 0:
                    # Use the first available storage class
                    targetStorageClass = storageClasses[0].metadata.name
                    logger.info(f"Using first available storage class '{targetStorageClass}' for PVC {pvcName}")
                else:
                    logger.error(f"Unable to set storageClassName in PVC {pvcName}. No storage classes available in the cluster.")
                    return False

            # Patch the PVC with the storage class
            pvc.spec.storageClassName = targetStorageClass
            logger.info(f"Patching PVC {pvcName} with storageClassName: {targetStorageClass}")
            pvcAPI.patch(body=pvc, namespace=namespace)

            # Wait for the PVC to be bound
            maxRetries = 60
            foundReadyPVC = False
            retries = 0
            while not foundReadyPVC and retries < maxRetries:
                retries += 1
                try:
                    patchedPVC = pvcAPI.get(name=pvcName, namespace=namespace)
                    if patchedPVC.status.phase == "Bound":
                        foundReadyPVC = True
                        logger.info(f"PVC {pvcName} is now bound")
                    else:
                        logger.debug(f"Waiting 5s for PVC {pvcName} to be bound before checking again ...")
                        sleep(5)
                except NotFoundError:
                    logger.error(f"The patched PVC {pvcName} does not exist.")
                    return False

            return foundReadyPVC
        else:
            logger.warning(f"PVC {pvcName} is not in Pending state or already has a storageClassName")
            return pvc.status.phase == "Bound"

    except NotFoundError:
        logger.error(f"PVC {pvcName} does not exist")
        return False
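
If the operator is already installed but the results PVC is stuck in Pending, the workaround can also be invoked directly; a sketch, reusing the dynClient from the earlier example:

from mas.devops.tekton import addMissingStorageClassToTektonPVC

# Passing storageClassName=None makes the function auto-select the first
# available storage class in the cluster
tektonPVCisReady = addMissingStorageClassToTektonPVC(
    dynClient=dynClient,
    namespace="openshift-pipelines",
    pvcName="postgredb-tekton-results-postgres-0",
    storageClassName=None
)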

updateTektonDefinitions(namespace, yamlFile)

Install or update MAS Tekton pipeline and task definitions from a YAML file.

Uses kubectl to apply a YAML file containing multiple resource types.

Parameters:

    namespace (str): The namespace to apply the definitions to
    yamlFile (str): Path to the YAML file containing Tekton definitions

Returns:

    None

Raises:

    KubectlCommandError: If the kubectl command fails

Source code in src/mas/devops/tekton.py
def updateTektonDefinitions(namespace: str, yamlFile: str) -> None:
    """
    Install or update MAS Tekton pipeline and task definitions from a YAML file.

    Uses kubectl to apply a YAML file containing multiple resource types.

    Parameters:
        namespace (str): The namespace to apply the definitions to
        yamlFile (str): Path to the YAML file containing Tekton definitions

    Returns:
        None

    Raises:
        kubeconfig.exceptions.KubectlCommandError: If kubectl command fails
    """
    result = kubectl.run(subcmd_args=['apply', '-n', namespace, '-f', yamlFile])
    for line in result.split("\n"):
        logger.debug(line)
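
Usage is a thin wrapper around kubectl apply; a sketch with an illustrative namespace and file path:

from mas.devops.tekton import updateTektonDefinitions

# Applies every resource in the file to the target namespace;
# each line of kubectl output is logged at debug level
updateTektonDefinitions("mas-inst1-pipelines", "/tmp/ibm-mas-tekton.yaml")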

preparePipelinesNamespace(dynClient, instanceId=None, storageClass=None, accessMode=None, waitForBind=True, configureRBAC=True)

Prepare a namespace for MAS pipelines by creating RBAC and PVC resources.

Creates cluster-wide or instance-specific pipeline namespace with necessary role bindings and persistent volume claims.

Parameters:

    dynClient (DynamicClient): OpenShift Dynamic Client
    instanceId (str, optional): MAS instance ID. If None, creates the cluster-wide namespace. Defaults to None.
    storageClass (str, optional): Storage class for the PVC. Defaults to None.
    accessMode (str, optional): Access mode for the PVC. Defaults to None.
    waitForBind (bool, optional): Whether to wait for the PVC to bind. Defaults to True.
    configureRBAC (bool, optional): Whether to configure RBAC. Defaults to True.

Returns:

    None

Raises:

    NotFoundError: If resources cannot be created

Source code in src/mas/devops/tekton.py
def preparePipelinesNamespace(dynClient: DynamicClient, instanceId: str = None, storageClass: str = None, accessMode: str = None, waitForBind: bool = True, configureRBAC: bool = True):
    """
    Prepare a namespace for MAS pipelines by creating RBAC and PVC resources.

    Creates cluster-wide or instance-specific pipeline namespace with necessary
    role bindings and persistent volume claims.

    Parameters:
        dynClient (DynamicClient): OpenShift Dynamic Client
        instanceId (str, optional): MAS instance ID. If None, creates cluster-wide namespace. Defaults to None.
        storageClass (str, optional): Storage class for the PVC. Defaults to None.
        accessMode (str, optional): Access mode for the PVC. Defaults to None.
        waitForBind (bool, optional): Whether to wait for PVC to bind. Defaults to True.
        configureRBAC (bool, optional): Whether to configure RBAC. Defaults to True.

    Returns:
        None

    Raises:
        NotFoundError: If resources cannot be created
    """
    templateDir = path.join(path.abspath(path.dirname(__file__)), "templates")
    env = Environment(
        loader=FileSystemLoader(searchpath=templateDir)
    )
    if instanceId is None:
        namespace = "mas-pipelines"
        template = env.get_template("pipelines-rbac-cluster.yml.j2")
    else:
        namespace = f"mas-{instanceId}-pipelines"
        template = env.get_template("pipelines-rbac.yml.j2")

    if configureRBAC:
        # Create RBAC
        renderedTemplate = template.render(mas_instance_id=instanceId)
        logger.debug(renderedTemplate)
        crb = yaml.safe_load(renderedTemplate)
        clusterRoleBindingAPI = dynClient.resources.get(api_version="rbac.authorization.k8s.io/v1", kind="ClusterRoleBinding")
        clusterRoleBindingAPI.apply(body=crb, namespace=namespace)

    # Create PVC (instanceId namespace only)
    if instanceId is not None:
        template = env.get_template("pipelines-pvc.yml.j2")
        renderedTemplate = template.render(
            mas_instance_id=instanceId,
            pipeline_storage_class=storageClass,
            pipeline_storage_accessmode=accessMode
        )
        logger.debug(renderedTemplate)
        pvc = yaml.safe_load(renderedTemplate)
        pvcAPI = dynClient.resources.get(api_version="v1", kind="PersistentVolumeClaim")
        pvcAPI.apply(body=pvc, namespace=namespace)

    if instanceId is not None and waitForBind:
        logger.debug("Waiting for PVC to be bound")
        pvcIsBound = False
        while not pvcIsBound:
            configPVC = pvcAPI.get(name="config-pvc", namespace=namespace)
            if configPVC.status.phase == "Bound":
                pvcIsBound = True
            else:
                logger.debug("Waiting 15s before checking status of PVC again")
                logger.debug(configPVC)
                sleep(15)
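
A sketch of preparing an instance-scoped pipelines namespace, reusing the dynClient from the first example (the instance ID, storage class, and access mode are illustrative):

from mas.devops.tekton import preparePipelinesNamespace

# Creates the RBAC bindings and the config-pvc PVC in mas-inst1-pipelines,
# then blocks until the PVC reports a Bound phase
preparePipelinesNamespace(
    dynClient=dynClient,
    instanceId="inst1",
    storageClass="ibmc-file-gold",
    accessMode="ReadWriteMany",
    waitForBind=True
)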

prepareAiServicePipelinesNamespace(dynClient, instanceId=None, storageClass=None, accessMode=None, waitForBind=True, configureRBAC=True)

Prepare a namespace for AI Service pipelines by creating RBAC and PVC resources.

Creates AI Service-specific pipeline namespace with necessary role bindings and persistent volume claims.

Parameters:

    dynClient (DynamicClient): OpenShift Dynamic Client
    instanceId (str, optional): AI Service instance ID. Defaults to None.
    storageClass (str, optional): Storage class for the PVC. Defaults to None.
    accessMode (str, optional): Access mode for the PVC. Defaults to None.
    waitForBind (bool, optional): Whether to wait for the PVC to bind. Defaults to True.
    configureRBAC (bool, optional): Whether to configure RBAC. Defaults to True.

Returns:

    None

Raises:

    NotFoundError: If resources cannot be created

Source code in src/mas/devops/tekton.py
def prepareAiServicePipelinesNamespace(dynClient: DynamicClient, instanceId: str = None, storageClass: str = None, accessMode: str = None, waitForBind: bool = True, configureRBAC: bool = True):
    """
    Prepare a namespace for AI Service pipelines by creating RBAC and PVC resources.

    Creates AI Service-specific pipeline namespace with necessary role bindings
    and persistent volume claims.

    Parameters:
        dynClient (DynamicClient): OpenShift Dynamic Client
        instanceId (str, optional): AI Service instance ID. Defaults to None.
        storageClass (str, optional): Storage class for the PVC. Defaults to None.
        accessMode (str, optional): Access mode for the PVC. Defaults to None.
        waitForBind (bool, optional): Whether to wait for PVC to bind. Defaults to True.
        configureRBAC (bool, optional): Whether to configure RBAC. Defaults to True.

    Returns:
        None

    Raises:
        NotFoundError: If resources cannot be created
    """
    templateDir = path.join(path.abspath(path.dirname(__file__)), "templates")
    env = Environment(
        loader=FileSystemLoader(searchpath=templateDir)
    )
    namespace = f"aiservice-{instanceId}-pipelines"
    template = env.get_template("aiservice-pipelines-rbac.yml.j2")

    if configureRBAC:
        # Create RBAC
        renderedTemplate = template.render(aiservice_instance_id=instanceId)
        logger.debug(renderedTemplate)
        crb = yaml.safe_load(renderedTemplate)
        clusterRoleBindingAPI = dynClient.resources.get(api_version="rbac.authorization.k8s.io/v1", kind="ClusterRoleBinding")
        clusterRoleBindingAPI.apply(body=crb, namespace=namespace)

    template = env.get_template("aiservice-pipelines-pvc.yml.j2")
    renderedTemplate = template.render(
        aiservice_instance_id=instanceId,
        pipeline_storage_class=storageClass,
        pipeline_storage_accessmode=accessMode
    )
    logger.debug(renderedTemplate)
    pvc = yaml.safe_load(renderedTemplate)
    pvcAPI = dynClient.resources.get(api_version="v1", kind="PersistentVolumeClaim")
    pvcAPI.apply(body=pvc, namespace=namespace)

    if waitForBind:
        logger.debug("Waiting for PVC to be bound")
        pvcIsBound = False
        while not pvcIsBound:
            configPVC = pvcAPI.get(name="config-pvc", namespace=namespace)
            if configPVC.status.phase == "Bound":
                pvcIsBound = True
            else:
                logger.debug("Waiting 15s before checking status of PVC again")
                logger.debug(configPVC)
                sleep(15)

prepareInstallSecrets(dynClient, namespace, slsLicenseFile=None, additionalConfigs=None, certs=None, podTemplates=None)

Create or update secrets required for MAS installation pipelines.

Creates four secrets in the specified namespace: pipeline-additional-configs, pipeline-sls-entitlement, pipeline-certificates, and pipeline-pod-templates.

Parameters:

    dynClient (DynamicClient): OpenShift Dynamic Client
    namespace (str): The namespace to create secrets in
    slsLicenseFile (str, optional): SLS license file content. Defaults to None (empty secret).
    additionalConfigs (dict, optional): Additional configuration data. Defaults to None (empty secret).
    certs (str, optional): Certificate data. Defaults to None (empty secret).
    podTemplates (str, optional): Pod template data. Defaults to None (empty secret).

Returns:

    None

Raises:

    NotFoundError: If secrets cannot be created

Source code in src/mas/devops/tekton.py
def prepareInstallSecrets(dynClient: DynamicClient, namespace: str, slsLicenseFile: str = None, additionalConfigs: dict = None, certs: str = None, podTemplates: str = None) -> None:
    """
    Create or update secrets required for MAS installation pipelines.

    Creates four secrets in the specified namespace: pipeline-additional-configs,
    pipeline-sls-entitlement, pipeline-certificates, and pipeline-pod-templates.

    Parameters:
        dynClient (DynamicClient): OpenShift Dynamic Client
        namespace (str): The namespace to create secrets in
        slsLicenseFile (str, optional): SLS license file content. Defaults to None (empty secret).
        additionalConfigs (dict, optional): Additional configuration data. Defaults to None (empty secret).
        certs (str, optional): Certificate data. Defaults to None (empty secret).
        podTemplates (str, optional): Pod template data. Defaults to None (empty secret).

    Returns:
        None

    Raises:
        NotFoundError: If secrets cannot be created
    """
    secretsAPI = dynClient.resources.get(api_version="v1", kind="Secret")

    # 1. Secret/pipeline-additional-configs
    # -------------------------------------------------------------------------
    # Must exist, but can be empty
    try:
        secretsAPI.delete(name="pipeline-additional-configs", namespace=namespace)
    except NotFoundError:
        pass

    if additionalConfigs is None:
        additionalConfigs = {
            "apiVersion": "v1",
            "kind": "Secret",
            "type": "Opaque",
            "metadata": {
                "name": "pipeline-additional-configs"
            }
        }
    secretsAPI.create(body=additionalConfigs, namespace=namespace)

    # 2. Secret/pipeline-sls-entitlement
    # -------------------------------------------------------------------------
    try:
        secretsAPI.delete(name="pipeline-sls-entitlement", namespace=namespace)
    except NotFoundError:
        pass

    if slsLicenseFile is None:
        slsLicenseFile = {
            "apiVersion": "v1",
            "kind": "Secret",
            "type": "Opaque",
            "metadata": {
                "name": "pipeline-sls-entitlement"
            }
        }
    secretsAPI.create(body=slsLicenseFile, namespace=namespace)

    # 3. Secret/pipeline-certificates
    # -------------------------------------------------------------------------
    # Must exist; it can start out as an empty secret before the customer configures it
    try:
        secretsAPI.delete(name="pipeline-certificates", namespace=namespace)
    except NotFoundError:
        pass

    if certs is None:
        certs = {
            "apiVersion": "v1",
            "kind": "Secret",
            "type": "Opaque",
            "metadata": {
                "name": "pipeline-certificates"
            }
        }
    secretsAPI.create(body=certs, namespace=namespace)

    # 4. Secret/pipeline-pod-templates
    # -------------------------------------------------------------------------
    # Must exist, but can be empty
    try:
        secretsAPI.delete(name="pipeline-pod-templates", namespace=namespace)
    except NotFoundError:
        pass

    if podTemplates is None:
        podTemplates = {
            "apiVersion": "v1",
            "kind": "Secret",
            "type": "Opaque",
            "metadata": {
                "name": "pipeline-pod-templates"
            }
        }
    secretsAPI.create(body=podTemplates, namespace=namespace)
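
Calling the function with only a namespace creates all four secrets empty, which is the expected starting point before any customer configuration; note that any existing secrets are deleted and recreated. A sketch:

from mas.devops.tekton import prepareInstallSecrets

# Creates pipeline-additional-configs, pipeline-sls-entitlement,
# pipeline-certificates, and pipeline-pod-templates as empty Opaque secrets
prepareInstallSecrets(dynClient, namespace="mas-inst1-pipelines")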

launchUpgradePipeline(dynClient, instanceId, skipPreCheck=False, masChannel='', params={})

Create a PipelineRun to upgrade the chosen MAS instance

Source code in src/mas/devops/tekton.py
def launchUpgradePipeline(dynClient: DynamicClient,
                          instanceId: str,
                          skipPreCheck: bool = False,
                          masChannel: str = "",
                          params: dict = {}) -> str:
    """
    Create a PipelineRun to upgrade the chosen MAS instance
    """
    pipelineRunsAPI = dynClient.resources.get(api_version="tekton.dev/v1beta1", kind="PipelineRun")
    namespace = f"mas-{instanceId}-pipelines"
    timestamp = datetime.now().strftime("%y%m%d-%H%M")
    # Create the PipelineRun
    templateDir = path.join(path.abspath(path.dirname(__file__)), "templates")
    env = Environment(
        loader=FileSystemLoader(searchpath=templateDir)
    )
    template = env.get_template("pipelinerun-upgrade.yml.j2")
    renderedTemplate = template.render(
        timestamp=timestamp,
        mas_instance_id=instanceId,
        skip_pre_check=skipPreCheck,
        mas_channel=masChannel,
        **params
    )
    logger.debug(renderedTemplate)
    pipelineRun = yaml.safe_load(renderedTemplate)
    pipelineRunsAPI.apply(body=pipelineRun, namespace=namespace)

    pipelineURL = f"{getConsoleURL(dynClient)}/k8s/ns/mas-{instanceId}-pipelines/tekton.dev~v1beta1~PipelineRun/{instanceId}-upgrade-{timestamp}"
    return pipelineURL
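
A sketch of launching an upgrade (the instance ID and channel are illustrative); the returned URL points at the PipelineRun in the OpenShift console:

from mas.devops.tekton import launchUpgradePipeline

pipelineURL = launchUpgradePipeline(
    dynClient=dynClient,
    instanceId="inst1",
    skipPreCheck=False,
    masChannel="9.0.x"  # illustrative subscription channel
)
print(f"Monitor the upgrade at: {pipelineURL}")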

launchUninstallPipeline(dynClient, instanceId, droNamespace, uninstallCertManager=False, uninstallGrafana=False, uninstallCatalog=False, uninstallDRO=False, uninstallMongoDb=False, uninstallSLS=False)

Create a PipelineRun to uninstall the chosen MAS instance (and selected dependencies)

Source code in src/mas/devops/tekton.py
def launchUninstallPipeline(dynClient: DynamicClient,
                            instanceId: str,
                            droNamespace: str,
                            uninstallCertManager: bool = False,
                            uninstallGrafana: bool = False,
                            uninstallCatalog: bool = False,
                            uninstallDRO: bool = False,
                            uninstallMongoDb: bool = False,
                            uninstallSLS: bool = False) -> str:
    """
    Create a PipelineRun to uninstall the chosen MAS instance (and selected dependencies)
    """
    pipelineRunsAPI = dynClient.resources.get(api_version="tekton.dev/v1beta1", kind="PipelineRun")
    namespace = f"mas-{instanceId}-pipelines"
    timestamp = datetime.now().strftime("%y%m%d-%H%M")
    # Create the PipelineRun
    templateDir = path.join(path.abspath(path.dirname(__file__)), "templates")
    env = Environment(
        loader=FileSystemLoader(searchpath=templateDir)
    )
    template = env.get_template("pipelinerun-uninstall.yml.j2")

    grafanaAction = "uninstall" if uninstallGrafana else "none"
    certManagerAction = "uninstall" if uninstallCertManager else "none"
    ibmCatalogAction = "uninstall" if uninstallCatalog else "none"
    mongoDbAction = "uninstall" if uninstallMongoDb else "none"
    slsAction = "uninstall" if uninstallSLS else "none"
    droAction = "uninstall" if uninstallDRO else "none"

    # Render the pipelineRun
    renderedTemplate = template.render(
        timestamp=timestamp,
        mas_instance_id=instanceId,
        grafana_action=grafanaAction,
        cert_manager_action=certManagerAction,
        ibm_catalogs_action=ibmCatalogAction,
        mongodb_action=mongoDbAction,
        sls_action=slsAction,
        dro_action=droAction,
        dro_namespace=droNamespace
    )
    logger.debug(renderedTemplate)
    pipelineRun = yaml.safe_load(renderedTemplate)
    pipelineRunsAPI.apply(body=pipelineRun, namespace=namespace)

    pipelineURL = f"{getConsoleURL(dynClient)}/k8s/ns/mas-{instanceId}-pipelines/tekton.dev~v1beta1~PipelineRun/{instanceId}-uninstall-{timestamp}"
    return pipelineURL
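
Each uninstall* flag maps to an "uninstall" or "none" action in the rendered template; a sketch that removes the instance along with its MongoDb and SLS dependencies (names are illustrative):

from mas.devops.tekton import launchUninstallPipeline

pipelineURL = launchUninstallPipeline(
    dynClient=dynClient,
    instanceId="inst1",
    droNamespace="redhat-marketplace",  # namespace where DRO is installed
    uninstallMongoDb=True,
    uninstallSLS=True
)
print(f"Monitor the uninstall at: {pipelineURL}")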

launchPipelineRun(dynClient, namespace, templateName, params)

Launch a Tekton PipelineRun from a template.

Creates a PipelineRun resource by rendering a Jinja2 template with the provided parameters.

Parameters:

    dynClient (DynamicClient): OpenShift Dynamic Client
    namespace (str): The namespace to create the PipelineRun in
    templateName (str): Name of the template file (without the .yml.j2 extension)
    params (dict): Parameters to pass to the template

Returns:

    str: Timestamp string used in the PipelineRun name (format: YYMMDD-HHMM)

Raises:

    NotFoundError: If the template or namespace is not found

Source code in src/mas/devops/tekton.py
def launchPipelineRun(dynClient: DynamicClient, namespace: str, templateName: str, params: dict) -> str:
    """
    Launch a Tekton PipelineRun from a template.

    Creates a PipelineRun resource by rendering a Jinja2 template with the provided parameters.

    Parameters:
        dynClient (DynamicClient): OpenShift Dynamic Client
        namespace (str): The namespace to create the PipelineRun in
        templateName (str): Name of the template file (without .yml.j2 extension)
        params (dict): Parameters to pass to the template

    Returns:
        str: Timestamp string used in the PipelineRun name (format: YYMMDD-HHMM)

    Raises:
        NotFoundError: If the template or namespace is not found
    """
    pipelineRunsAPI = dynClient.resources.get(api_version="tekton.dev/v1beta1", kind="PipelineRun")
    timestamp = datetime.now().strftime("%y%m%d-%H%M")
    # Create the PipelineRun
    templateDir = path.join(path.abspath(path.dirname(__file__)), "templates")
    env = Environment(
        loader=FileSystemLoader(searchpath=templateDir)
    )
    template = env.get_template(f"{templateName}.yml.j2")

    # Render the pipelineRun
    renderedTemplate = template.render(
        timestamp=timestamp,
        **params
    )
    logger.debug(renderedTemplate)
    pipelineRun = yaml.safe_load(renderedTemplate)
    pipelineRunsAPI.apply(body=pipelineRun, namespace=namespace)
    return timestamp
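
launchPipelineRun is the generic engine behind the install and update launchers below; the template name must match a bundled .yml.j2 file, and the params keys must match that template's variables. A sketch, reusing the dynClient from the first example (the params key shown is hypothetical):

# "pipelinerun-update" is the template used by launchUpdatePipeline;
# "mas_catalog_version" is a hypothetical variable shown for illustration only
params = {"mas_catalog_version": "v9-amd64"}
timestamp = launchPipelineRun(dynClient, "mas-pipelines", "pipelinerun-update", params)
print(f"PipelineRun created with timestamp suffix {timestamp}")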

launchInstallPipeline(dynClient, params)

Create a PipelineRun to install a MAS or AI Service instance with selected dependencies.

Automatically detects whether to install MAS or AI Service based on the presence of mas_instance_id in params.

Parameters:

    dynClient (DynamicClient): OpenShift Dynamic Client
    params (dict): Installation parameters including instance ID and configuration

Returns:

    str: URL to the PipelineRun in the OpenShift console

Raises:

    NotFoundError: If resources cannot be created

Source code in src/mas/devops/tekton.py
def launchInstallPipeline(dynClient: DynamicClient, params: dict) -> str:
    """
    Create a PipelineRun to install a MAS or AI Service instance with selected dependencies.

    Automatically detects whether to install MAS or AI Service based on the presence
    of mas_instance_id in params.

    Parameters:
        dynClient (DynamicClient): OpenShift Dynamic Client
        params (dict): Installation parameters including instance ID and configuration

    Returns:
        str: URL to the PipelineRun in the OpenShift console

    Raises:
        NotFoundError: If resources cannot be created
    """
    applicationType = "aiservice" if not params.get("mas_instance_id") else "mas"
    params["applicationType"] = applicationType
    instanceId = params[f"{applicationType}_instance_id"]
    namespace = f"{applicationType}-{instanceId}-pipelines"
    timestamp = launchPipelineRun(dynClient, namespace, "pipelinerun-install", params)

    pipelineURL = f"{getConsoleURL(dynClient)}/k8s/ns/{applicationType}-{instanceId}-pipelines/tekton.dev~v1beta1~PipelineRun/{instanceId}-install-{timestamp}"
    return pipelineURL
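
A sketch of the application-type detection: a truthy mas_instance_id in params selects the MAS install path, otherwise the AI Service path is used (real installs require many more params keys, omitted here):

from mas.devops.tekton import launchInstallPipeline

# MAS install: targets namespace mas-inst1-pipelines
masURL = launchInstallPipeline(dynClient, params={"mas_instance_id": "inst1"})

# AI Service install: targets namespace aiservice-aib1-pipelines
# (aiservice_instance_id is required when mas_instance_id is absent)
aiURL = launchInstallPipeline(dynClient, params={"aiservice_instance_id": "aib1"})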

launchUpdatePipeline(dynClient, params)

Create a PipelineRun to update the Maximo Operator Catalog.

Parameters:

    dynClient (DynamicClient): OpenShift Dynamic Client
    params (dict): Update parameters

Returns:

    str: URL to the PipelineRun in the OpenShift console

Raises:

    NotFoundError: If resources cannot be created

Source code in src/mas/devops/tekton.py
def launchUpdatePipeline(dynClient: DynamicClient, params: dict) -> str:
    """
    Create a PipelineRun to update the Maximo Operator Catalog.

    Parameters:
        dynClient (DynamicClient): OpenShift Dynamic Client
        params (dict): Update parameters

    Returns:
        str: URL to the PipelineRun in the OpenShift console

    Raises:
        NotFoundError: If resources cannot be created
    """
    namespace = "mas-pipelines"
    timestamp = launchPipelineRun(dynClient, namespace, "pipelinerun-update", params)

    pipelineURL = f"{getConsoleURL(dynClient)}/k8s/ns/mas-pipelines/tekton.dev~v1beta1~PipelineRun/mas-update-{timestamp}"
    return pipelineURL

launchAiServiceUpgradePipeline(dynClient, aiserviceInstanceId, skipPreCheck=False, aiserviceChannel='', params={})

Create a PipelineRun to upgrade the chosen AI Service instance

Source code in src/mas/devops/tekton.py
def launchAiServiceUpgradePipeline(dynClient: DynamicClient,
                                   aiserviceInstanceId: str,
                                   skipPreCheck: bool = False,
                                   aiserviceChannel: str = "",
                                   params: dict = {}) -> str:
    """
    Create a PipelineRun to upgrade the chosen AI Service instance
    """
    pipelineRunsAPI = dynClient.resources.get(api_version="tekton.dev/v1beta1", kind="PipelineRun")
    namespace = f"aiservice-{aiserviceInstanceId}-pipelines"
    timestamp = datetime.now().strftime("%y%m%d-%H%M")
    # Create the PipelineRun
    templateDir = path.join(path.abspath(path.dirname(__file__)), "templates")
    env = Environment(
        loader=FileSystemLoader(searchpath=templateDir)
    )
    template = env.get_template("pipelinerun-aiservice-upgrade.yml.j2")
    renderedTemplate = template.render(
        timestamp=timestamp,
        aiservice_instance_id=aiserviceInstanceId,
        skip_pre_check=skipPreCheck,
        aiservice_channel=aiserviceChannel,
        **params
    )
    logger.debug(renderedTemplate)
    pipelineRun = yaml.safe_load(renderedTemplate)
    pipelineRunsAPI.apply(body=pipelineRun, namespace=namespace)

    pipelineURL = f"{getConsoleURL(dynClient)}/k8s/ns/aiservice-{aiserviceInstanceId}-pipelines/tekton.dev~v1beta1~PipelineRun/{aiserviceInstanceId}-upgrade-{timestamp}"
    return pipelineURL