question-mark
Stuck on an issue?

Lightrun Answers was designed to reduce the constant googling that comes with debugging 3rd party libraries. It collects links to all the places you might be looking at while hunting down a tough bug.

And, if you’re still stuck at the end, we’re happy to hop on a call to see how we can help out.

bug(sdk) AddOnGroup's internal params not working with custom tasks

See original GitHub issue

/kind bug

What steps did you take and what happened:

from attr import dataclass, asdict, fields

from kfp import dsl
from kfp.components import load_component_from_text
from kfp_tekton.compiler import TektonCompiler
from kfp_tekton.tekton import AddOnGroup

CEL_TASK_IMAGE_NAME = "veryunique/image:latest"
from kfp_tekton.tekton import TEKTON_CUSTOM_TASK_IMAGES
# list.append mutates the list in place and returns None, so the original
# `TEKTON_CUSTOM_TASK_IMAGES = TEKTON_CUSTOM_TASK_IMAGES.append(...)`
# rebound the name to None.  Just mutate the registry list.
TEKTON_CUSTOM_TASK_IMAGES.append(CEL_TASK_IMAGE_NAME)


@dataclass
class ExitHandlerFields:
    """Bundle of the pipeline params an :class:`ExitHandler` exposes.

    Supports dict-style lookup, treating dashes and underscores in the
    key as interchangeable (e.g. ``fields['status-message']``).
    """
    status: dsl.PipelineParam
    status_message: dsl.PipelineParam

    def __getitem__(self, item: str) -> dsl.PipelineParam:
        """Return the field named *item*, mapping ``-`` to ``_`` on miss."""
        as_mapping = asdict(self)
        return as_mapping.get(item) or as_mapping.get(item.replace('-', '_'))

class ExitHandler(AddOnGroup):
    """A custom OpsGroup that compiles to an ``Exception`` custom task.

    Runs in the pipeline's ``finally`` section and exposes the run's
    status and status message to the ops defined inside it through an
    :class:`ExitHandlerFields` instance (returned by ``with ... as``).
    """

    def __init__(self):
        # Disable result caching in both the KFP and the ws-pipelines
        # annotation namespaces.
        labels = {
            'pipelines.kubeflow.org/cache_enabled': 'false',
        }
        annotations = {
            'ws-pipelines.ibm.com/pipeline-cache-enabled': 'false',
        }

        super().__init__(
            kind='Exception',
            api_version='custom.tekton.dev/v1alpha1',
            params={},
            is_finally=True,
            labels=labels,
            annotations=annotations,
        )

        # One internal param per ExitHandlerFields field, kept both as a
        # lookup map (for the post_* hooks) and as user-facing fields.
        internal_params = {
            field.name: AddOnGroup.create_internal_param(field.name)
            for field in fields(ExitHandlerFields)
        }
        self._internal_params = internal_params
        self.fields = ExitHandlerFields(
            status=internal_params['status'],
            status_message=internal_params['status_message'],
        )

    def __enter__(self) -> ExitHandlerFields:
        """Enter the group, returning its fields instead of the group."""
        super().__enter__()
        return self.fields

    def post_params(self, params: list) -> list:
        """Rewrite the group's compiled param list.

        Deduplicates params by name (last occurrence wins), removes the
        internal params (they are injected by the runtime rather than
        passed by the pipeline), and appends the pipelineRun name.
        """
        params_map = {
            param['name']: param for param in params
        }
        internal_names = set(self._internal_params)
        params = [
            param for name, param in params_map.items()
            if name not in internal_names
        ]
        params.append({
            'name': 'pipelinerun_name',
            'value': '$(context.pipelineRun.name)',
        })
        return params

    def post_task_spec(self, task_spec: dict) -> dict:
        """Patch the compiled task spec in place and return it.

        Adds pod-template settings and declares every internal param as a
        string param of the inner ``pipelineSpec``.
        """
        # Bind the nested 'spec' dict exactly once via setdefault.  The
        # original read it with `.get('spec') or {}` first and re-read it
        # with `setdefault('spec', {})` afterwards, which silently dropped
        # the podTemplate edits whenever 'spec' was absent from task_spec.
        spec = task_spec.setdefault('spec', {})

        pod_template = spec.setdefault('podTemplate', {})
        pod_template['imagePullSecrets'] = [{'name': 'ai-lifecycle'}]
        # NOTE(review): kept as the string 'false' exactly as in the
        # original; Kubernetes expects a boolean here — confirm intent
        # before changing.
        pod_template['automountServiceAccountToken'] = 'false'

        pipeline_spec = spec.setdefault('pipelineSpec', {})
        params_map = {
            param['name']: param
            for param in (pipeline_spec.get('params') or [])
        }
        # Internal params override any same-named existing declarations.
        params_map.update({
            name: {
                'name': name,
                'type': 'string',
            } for name in self._internal_params
        })
        pipeline_spec['params'] = list(params_map.values())
        return task_spec

def PrintOp(name: str, msg: str = None):
  """Instantiate a one-shot echo component that writes *msg* to its output.

  When *msg* is omitted, the component *name* doubles as the message.
  """
  component_text = """
  name: %s
  inputs:
  - {name: input_text, type: String, description: 'Represents an input parameter.'}
  outputs:
  - {name: output_value, type: String, description: 'Represents an output paramter.'}
  implementation:
    container:
      image: alpine:3.6
      command:
      - sh
      - -c
      - |
        set -e
        echo $0 > $1
      - {inputValue: input_text}
      - {outputPath: output_value}
  """ % name
  print_op_factory = load_component_from_text(component_text)
  return print_op_factory(name if msg is None else msg)

@dsl.pipeline("test pipeline")
def test_pipeline():
    """Reproduction pipeline: a custom task consuming an AddOnGroup internal param."""
    with ExitHandler() as handler:
        # The equivalent "normal" container task compiles without error:
        #PrintOp("print-err-status", handler.status)
        cel_factory = load_component_from_text(r"""
            name: cel
            inputs:
            - {name: cel-input}
            outputs:
            - {name: cel-output}
            implementation:
              container:
                image: veryunique/image:latest
                command: [cel]
                args:
                - --apiVersion
                - custom.tekton.dev/v1alpha1
                - --kind
                - Cel
                - --name
                - cel_123
                - --status
                - {inputValue: cel-input}
                - --taskSpec
                - '{}'
                fileOutputs: {}
        """)
        cel = cel_factory(handler.status)
        cel.add_pod_annotation("valid_container", "false")

    PrintOp("print", "some-message")

if __name__ == '__main__':
  # Compile the pipeline into a Tekton YAML next to this script.
  output_path = __file__.replace('.py', '.yaml')
  TektonCompiler().compile(test_pipeline, output_path)

It fails with:

Traceback (most recent call last):
  File "(...)/addongroup_internal_params.py", line 152, in <module>
    TektonCompiler().compile(test_pipeline, __file__.replace('.py', '.yaml'))
  File "(...)/kfp-tekton/sdk/python/kfp_tekton/compiler/compiler.py", line 1560, in compile
    super().compile(pipeline_func, package_path, type_check, pipeline_conf=pipeline_conf)
  File "/Users/michalina.kotwica@ibm.com/PycharmProjects/orchestration_flow_translator/src/kfp/sdk/python/kfp/compiler/compiler.py", line 1175, in compile
    self._create_and_write_workflow(
  File "(...)/kfp-tekton/sdk/python/kfp_tekton/compiler/compiler.py", line 1714, in _create_and_write_workflow
    pipeline_loop_crs, workflow = self.prepare_workflow(
  File "(...)/kfp-tekton/sdk/python/kfp_tekton/compiler/compiler.py", line 1625, in prepare_workflow
    workflow = self._create_workflow(
  File "(...)/kfp-tekton/sdk/python/kfp_tekton/compiler/compiler.py", line 1515, in _create_workflow
    workflow = self._create_pipeline_workflow(
  File "(...)/kfp-tekton/sdk/python/kfp_tekton/compiler/compiler.py", line 1231, in _create_pipeline_workflow
    substitute_param = '$(tasks.%s.results.%s)' % (sanitize_k8s_name(pp.op_name), sanitize_k8s_name(pp.name))
  File "(...)/kfp-tekton/sdk/python/kfp_tekton/compiler/_k8s_helper.py", line 51, in sanitize_k8s_name
    raise ex
  File "(...)/kfp-tekton/sdk/python/kfp_tekton/compiler/_k8s_helper.py", line 47, in sanitize_k8s_name
    k8s_name = re.sub('[^-_./0-9A-Za-z]+', '-', name)
  File "(...)/3.9/lib/python3.9/re.py", line 210, in sub
    return _compile(pattern, flags).sub(repl, string, count)
TypeError: expected string or bytes-like object

…but if the custom-task is replaced by a “normal” task, which uses the same status param (the commented line), it works as expected, with no error.

What did you expect to happen: Both tasks and custom-tasks being able to accept an internal parameter from AddOnGroup.

Additional information: None found.

Environment:

  • Python Version (use python --version): 3.9.0
  • SDK Version: 7d9f8c9a30e84f1bbaa6bfc09e0671c5939b79b0
  • Tekton Version (use tkn version): irrelevant
  • Kubernetes Version (use kubectl version): irrelevant
  • OS (e.g. from /etc/os-release): irrelevant

Issue Analytics

  • State:closed
  • Created a year ago
  • Comments:6 (6 by maintainers)

github_iconTop GitHub Comments

1reaction
Tomclicommented, Jul 25, 2022

@Udiknedormin I think you used the DSL incorrectly; you should set {inputValue: cel-input} since that component has no input parameter named status. status is the output name from the exit handler, but you didn’t define it as the input name for the cel custom task.

Read more comments on GitHub >

github_iconTop Results From Across the Web

[sdk] dsl.ExitHandler error: bad replacement for`{{workflow ...
Environment KFP version: 1.4.1 KFP SDK version: 1.4.0 All ... bug(sdk) AddOnGroup's internal params not working with custom tasks ...
Read more >
Developing Custom Gradle Task Types
Here is a simple build script for the project. It applies the Groovy plugin, and adds the Gradle API as a compile-time dependency....
Read more >
Running a gradle task from inside a custom task
Problem: When I run my custom task, it throws an error that. org.gradle.api.InvalidUserDataException: Cannot configure the 'publishing' ...
Read more >
Implement Custom Tasks in Your Skill | Alexa Skills Kit
A skill provider can create a custom task that a requester skill can use. The requester skill can choose which provider should fulfill...
Read more >
Creating a task | Ethereum development environment for ...
The Config API task function returns an object with methods to define all of them. Once defined, Hardhat takes control of parsing parameters, ......
Read more >

github_iconTop Related Medium Post

No results found

github_iconTop Related StackOverflow Question

No results found

github_iconTroubleshoot Live Code

Lightrun enables developers to add logs, metrics and snapshots to live code - no restarts or redeploys required.
Start Free

github_iconTop Related Reddit Thread

No results found

github_iconTop Related Hackernoon Post

No results found

github_iconTop Related Tweet

No results found

github_iconTop Related Dev.to Post

No results found

github_iconTop Related Hashnode Post

No results found