
Commit

fix use of to_json() instead of to_dict()
christian-monch committed Feb 27, 2023
1 parent f60e7f2 commit 31d87d4
Showing 6 changed files with 25 additions and 15 deletions.
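
The change set separates building a plain dictionary (to_dict()) from serializing it to JSON text (to_json()): call sites that need structured data now invoke to_dict(), and to_json() reduces to json.dumps(self.to_dict()). A minimal sketch of the resulting pattern, using a hypothetical ExampleResult class that is not part of the change set:

import json
from typing import Dict

class ExampleResult:
    """Hypothetical class illustrating the to_dict()/to_json() split."""
    def __init__(self, content: str):
        self.content = content

    def to_dict(self) -> Dict:
        # build the plain, JSON-serializable representation
        return {"content": self.content}

    def to_json(self) -> str:
        # encode the dict instead of hand-crafting a JSON string
        return json.dumps(self.to_dict())
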
8 changes: 4 additions & 4 deletions datalad_metalad/conduct.py
@@ -303,7 +303,7 @@ def process_parallel(executor,
status="ok",
path=str(path),
logger=lgr,
- pipeline_data=pipeline_data.to_json())
+ pipeline_data=pipeline_data.to_dict())
continue

lgr.debug(f"Starting new instance of {processor_specs[0]} on {pipeline_data}")
@@ -343,7 +343,7 @@ def process_parallel(executor,
status="ok",
path=str(path),
logger=lgr,
- pipeline_data=pipeline_data.to_json())
+ pipeline_data=pipeline_data.to_dict())
else:
lgr.debug(
f"Starting processor[{next_index}]"
@@ -396,7 +396,7 @@ def process_parallel(executor,
status="ok",
path=str(path),
logger=lgr,
- pipeline_data=pipeline_data.to_json())
+ pipeline_data=pipeline_data.to_dict())
else:
lgr.debug(
f"Handing pipeline data {pipeline_data} to"
@@ -500,7 +500,7 @@ def process_downstream(pipeline_data: PipelineData,
status="ok",
path=str(path),
logger=lgr,
- pipeline_data=pipeline_data.to_json())
+ pipeline_data=pipeline_data.to_dict())

lgr.debug(
f"Pipeline finished, returning datalad result {datalad_result}")
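
Since the old PipelineData.to_json() already returned a dict, these call sites keep embedding the pipeline data as a nested dict in each datalad result record; only the method name changes, while to_json() is now reserved for actual JSON text. A hedged sketch of reading such a record, based on the keys used above and in PipelineData.to_dict() below (datalad_result stands for one yielded record):

# the result record carries the pipeline data as a nested dict, not a JSON string
state = datalad_result["pipeline_data"]["state"]            # e.g. "CONTINUE"
path = datalad_result["pipeline_data"]["result"]["path"]    # str()-converted path
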
16 changes: 11 additions & 5 deletions datalad_metalad/pipeline/pipelinedata.py
@@ -20,8 +20,11 @@ class PipelineDataState(Enum):
CONTINUE = "continue"
STOP = "stop"

+ def to_dict(self) -> str:
+ return self.name

def to_json(self):
- return f'"{self.name}"'
+ return json.dumps(self.to_dict())


@dataclass
@@ -94,8 +97,8 @@ def __str__(self):
"result": self._result
})

- def to_json(self) -> Dict:
- json_obj = {
+ def to_dict(self) -> Dict:
+ obj = {
"state": self.state.name,
"result": {
key: [
@@ -106,5 +109,8 @@ def to_json(self) -> Dict:
if key not in ("path",)
}
}
- json_obj["result"]["path"] = str(self._result["path"])
- return json_obj
+ obj["result"]["path"] = str(self._result["path"])
+ return obj

+ def to_json(self) -> str:
+ return json.dumps(self.to_dict())
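
With this split, to_dict() is the single place where the serializable structure is assembled and to_json() merely encodes it; for the enum, json.dumps(self.to_dict()) produces the same quoted "CONTINUE"/"STOP" strings the old f-string did. A hedged usage sketch, assuming pipeline_data is a PipelineData instance whose result values are plain JSON types:

import json

d = pipeline_data.to_dict()   # {"state": ..., "result": {..., "path": ...}}
s = pipeline_data.to_json()   # JSON text, i.e. json.dumps(d)
assert json.loads(s) == d     # round-trips as long as d contains only JSON types
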
2 changes: 1 addition & 1 deletion datalad_metalad/pipeline/processor/add.py
@@ -36,7 +36,7 @@ class MetadataAddResult(PipelineResult):

def to_json(self) -> Dict:
return {
- **super().to_json(),
+ **super().to_dict(),
"path": str(self.path)
}

2 changes: 1 addition & 1 deletion datalad_metalad/pipeline/processor/extract.py
@@ -100,7 +100,7 @@ def process(self, pipeline_data: PipelineData) -> PipelineData:
extractorname=self.extractor_name,
dataset=dataset_path,
path=intra_dataset_path,
- file_info=dataset_traverse_record.element_info.to_json(),
+ file_info=dataset_traverse_record.element_info.to_dict(),
result_renderer="disabled")
elif object_type == "dataset":
kwargs = dict(
2 changes: 1 addition & 1 deletion datalad_metalad/pipeline/processor/filter.py
@@ -30,7 +30,7 @@ class MetadataFilterResult(PipelineResult):

def to_json(self) -> Dict:
return {
- **super().to_json(),
+ **super().to_dict(),
"path": str(self.path),
"metadata_record": self.metadata_record
}
10 changes: 7 additions & 3 deletions datalad_metalad/tests/test_conduct.py
@@ -109,12 +109,15 @@ class ConductTestResult(PipelineResult):
class StringResult(PipelineResult):
content: str

- def to_json(self) -> Dict:
+ def to_dict(self) -> Dict:
return {
- **super().to_json(),
+ **super().to_dict(),
"content": self.content
}

+ def to_json(self) -> str:
+ return json.dumps(self.to_dict())


class ConductTestTraverser(Provider):
def __init__(self, path_spec: str):
@@ -247,7 +250,8 @@ def test_multiple_adder():
]
))
],
- configuration=adder_pipeline))
+ configuration=adder_pipeline,
+ processing_mode="sequential"))

eq_(len(pipeline_results), 1)
result = pipeline_results[0]
