You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
This pattern can be replaced by a more comprehensive parameter evaluation that is uniform across all APIs (CLI and Python). Here is what needs to be done (look for XXX comments in the diff):
diff --git a/datalad_example/command.py b/datalad_example/command.py
index 261d8a9..0a41f85 100644
--- a/datalad_example/command.py
+++ b/datalad_example/command.py
@@ -21,38 +21,34 @@ from pathlib import (
)
from shutil import copyfile
-# XXX no longer needed, replaced by a new EnsureDataset implementation
-from datalad.distribution.dataset import (
-    EnsureDataset,
-    require_dataset,
-)
from datalad_next.commands import (
-    Interface,
+    # XXX new interface class and joint validator
+    EnsureCommandParameterization,
+    ValidatedInterface,
build_doc,
datasetmethod,
)
from datalad.interface.common_opts import (
recursion_limit,
recursion_flag,
)
+# XXX will be available from `datalad_next.constraints` with v1.3
+from datalad_next.constraints.dataset import (
+    EnsureDataset,
+)
@build_doc
-class Command(Interface):
+# XXX swap out base class, enables new-style parameter validation
+class Command(ValidatedInterface):
"""Export a dataset to a Bag-it
This is a proof-of-principle implementation that can export a DataLad
@@ -85,8 +81,7 @@ class ExportBagit(Interface):
_params_ = dict(
dataset=Parameter(
args=("-d", "--dataset"),
-            doc="""specify the dataset to export""",
-            # XXX remove parameter constraint specification,
-            # these are only active for the CLI
-            constraints=EnsureDataset() | EnsureNone()),
+            doc="""specify the dataset to export"""),
to=Parameter(
args=("to",),
metavar='PATH',
@@ -102,6 +97,13 @@ class ExportBagit(Interface):
recursion_limit=recursion_limit,
)
+    # XXX declare ALL constraints in a dedicated/specialized meta-constraint
+    # for joint validation (here only done for `dataset`, but can take any
+    # number, and can also validate interactions between parameters, and
+    # perform arbitrary parameter value transformation on the joint set)
+    _validator_ = EnsureCommandParameterization(
+        param_constraints=dict(
+            # XXX place individual constraints here, pretty much as before
+            dataset=EnsureDataset(installed=True),
+        ),
+        # XXX declare any parameters that should have their default values
+        # processed too (here we want to act on `None` and discover a dataset)
+        validate_defaults=('dataset',),
+    )
+
@staticmethod
@datasetmethod(name='command')
@eval_results
@@ -112,10 +114,7 @@ class Command(Interface):
recursive=False,
recursion_limit=None):
-        ds = require_dataset(
-            dataset,
-            check_installed=True,
-            purpose='exporting to BagIt')
+        # XXX at this point the dataset has already been discovered
+        # and a Dataset instance was created, or an error message was given.
+        # The original argument value is accessible as dataset.original
+        # See https://docs.datalad.org/projects/next/en/latest/generated/generated/datalad_next.constraints.DatasetParameter.html
+        ds = dataset.ds
res_kwargs = dict(
action='export_bagit',
The text was updated successfully, but these errors were encountered:
Traditionally, commands operating on datasets use these two code pieces.
In the command API declaration, the constraint is often used as such
In the body of the command function, it is acted upon like this (more or less):
This pattern can be replaced by a more comprehensive parameter evaluation that is uniform across all APIs (CLI and Python). Here is what needs to be done (look for XXX comments in the diff):
The text was updated successfully, but these errors were encountered: