
AI update based on proto changes #953

Draft: wants to merge 1 commit into main
31 changes: 24 additions & 7 deletions src/viam/app/data_client.py
@@ -455,7 +455,7 @@ async def tabular_data_by_filter(
    if dest:
        try:
            file = open(dest, "w")
-            file.write(f"{[str(d) for d in data]}")
+            file.write(f"{[[str(d) for d in data]}")
Review comment (Member): AI hallucination

Review comment (Member) with suggested change:
-            file.write(f"{[[str(d) for d in data]}")
+            file.write(f"{[str(d) for d in data]}")

            file.flush()
        except Exception as e:
            LOGGER.error(f"Failed to write tabular data to file {dest}", exc_info=e)
@@ -760,7 +760,7 @@ async def binary_data_by_filter(
    if dest:
        try:
            file = open(dest, "w")
-            file.write(f"{[str(d) for d in data]}")
+            file.write(f"{response.data}")
            file.flush()
        except Exception as e:
            LOGGER.error(f"Failed to write binary data to file {dest}", exc_info=e)
@@ -1485,6 +1485,7 @@ async def binary_data_capture_upload(
    method_parameters: Optional[Mapping[str, Any]] = None,
    tags: Optional[List[str]] = None,
    data_request_times: Optional[Tuple[datetime, datetime]] = None,
+    dataset_ids: Optional[List[str]] = None,
) -> str:
    """Upload binary sensor data.
@@ -1519,6 +1520,7 @@ async def binary_data_capture_upload(
        or ``.png`` extension will appear in the **Images** tab.
    method_parameters (Optional[Mapping[str, Any]]): Optional dictionary of method parameters. No longer in active use.
    tags (Optional[List[str]]): Optional list of tags to allow for tag-based data filtering when retrieving data.
+    dataset_ids (Optional[List[str]]): Optional list of dataset IDs to associate with the uploaded data.
    data_request_times (Optional[Tuple[datetime.datetime, datetime.datetime]]): Optional tuple containing datetime objects
        denoting the times this data was requested ``[0]`` by the robot and received ``[1]`` from the appropriate sensor.
@@ -1550,6 +1552,7 @@ async def binary_data_capture_upload(
    type=DataType.DATA_TYPE_BINARY_SENSOR,
    method_parameters=method_parameters,
    tags=tags,
+    dataset_ids=dataset_ids,
)
if file_extension:
    metadata.file_extension = file_extension if file_extension[0] == "." else f".{file_extension}"
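For illustration, a minimal usage sketch of the new parameter (the IDs are placeholders, `data_client` is assumed to be an authenticated `DataClient`, and the dataset must already exist):

with open("capture.jpg", "rb") as f:
    image_bytes = f.read()

upload_id = await data_client.binary_data_capture_upload(
    part_id="<PART-ID>",
    component_type="camera",
    component_name="my_camera",
    method_name="GetImages",
    binary_data=image_bytes,
    file_extension=".jpg",
    tags=["training"],
    dataset_ids=["<DATASET-ID>"],  # new: attach the upload to existing datasets
)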
@@ -1566,6 +1569,7 @@ async def tabular_data_capture_upload(
    data_request_times: List[Tuple[datetime, datetime]],
    method_parameters: Optional[Mapping[str, Any]] = None,
    tags: Optional[List[str]] = None,
+    dataset_ids: Optional[List[str]] = None,
) -> str:
    """Upload tabular sensor data.
@@ -1585,12 +1589,14 @@ async def tabular_data_capture_upload(
    method_name='Readings',
    tags=["sensor_data"],
    data_request_times=[(time_requested, time_received)],
-    tabular_data=[{
-        'readings': {
-            'linear_velocity': {'x': 0.5, 'y': 0.0, 'z': 0.0},
-            'angular_velocity': {'x': 0.0, 'y': 0.0, 'z': 0.1}
+    tabular_data=[
+        {
+            'readings': {
+                'linear_velocity': {'x': 0.5, 'y': 0.0, 'z': 0.0},
+                'angular_velocity': {'x': 0.0, 'y': 0.0, 'z': 0.1}
+            }
Comment on lines +1592 to +1597
Review comment (Member) with suggested change:
-    tabular_data=[
-        {
-            'readings': {
-                'linear_velocity': {'x': 0.5, 'y': 0.0, 'z': 0.0},
-                'angular_velocity': {'x': 0.0, 'y': 0.0, 'z': 0.1}
-            }
+    tabular_data=[{
+        'readings': {
+            'linear_velocity': {'x': 0.5, 'y': 0.0, 'z': 0.0},
+            'angular_velocity': {'x': 0.0, 'y': 0.0, 'z': 0.1}

+        }
-    }]
+    ]
Review comment (Member) with suggested change:
-    ]
+    }]

ai hallucination

)
Args:
@@ -1605,6 +1611,7 @@ async def tabular_data_capture_upload(
        Pass a list of tabular data and timestamps with length ``n > 1`` to upload ``n`` datapoints, all with the same metadata.
    method_parameters (Optional[Mapping[str, Any]]): Optional dictionary of method parameters. No longer in active use.
    tags (Optional[List[str]]): Optional list of tags to allow for tag-based data filtering when retrieving data.
+    dataset_ids (Optional[List[str]]): Optional list of dataset IDs to associate with the uploaded data.
Raises:
GRPCError: If an invalid part ID is passed.
@@ -1647,6 +1654,7 @@ async def tabular_data_capture_upload(
    type=DataType.DATA_TYPE_TABULAR_SENSOR,
    method_parameters=method_parameters,
    tags=tags,
+    dataset_ids=dataset_ids,
)
response = await self._data_capture_upload(metadata=metadata, sensor_contents=sensor_contents)
return response.file_id
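Likewise, the docstring example above would gain one line. A sketch with placeholder IDs (the part and component arguments stand in for the elided ones):

file_id = await data_client.tabular_data_capture_upload(
    part_id="<PART-ID>",
    component_type="rdk:component:movement_sensor",
    component_name="movement_sensor-1",
    method_name="Readings",
    tags=["sensor_data"],
    data_request_times=[(time_requested, time_received)],
    tabular_data=[{
        "readings": {
            "linear_velocity": {"x": 0.5, "y": 0.0, "z": 0.0},
            "angular_velocity": {"x": 0.0, "y": 0.0, "z": 0.1},
        }
    }],
    dataset_ids=["<DATASET-ID>"],  # new in this change
)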
@@ -1667,6 +1675,7 @@ async def streaming_data_capture_upload(
    method_parameters: Optional[Mapping[str, Any]] = None,
    data_request_times: Optional[Tuple[datetime, datetime]] = None,
    tags: Optional[List[str]] = None,
+    dataset_ids: Optional[List[str]] = None,
) -> str:
    """Uploads the metadata and contents of streaming binary data.
@@ -1697,6 +1706,7 @@ async def streaming_data_capture_upload(
    data_request_times (Optional[Tuple[datetime.datetime, datetime.datetime]]): Optional tuple containing datetime objects
        denoting the times this data was requested ``[0]`` by the robot and received ``[1]`` from the appropriate sensor.
    tags (Optional[List[str]]): Optional list of tags to allow for tag-based filtering when retrieving data.
+    dataset_ids (Optional[List[str]]): Optional list of dataset IDs to associate with the uploaded data.
Raises:
GRPCError: If an invalid part ID is passed.
@@ -1716,6 +1726,7 @@ async def streaming_data_capture_upload(
    type=DataType.DATA_TYPE_BINARY_SENSOR,
    file_extension=file_ext if file_ext[0] == "." else f".{file_ext}",
    tags=tags,
+    dataset_ids=dataset_ids,
)
sensor_metadata = SensorMetadata(
    time_requested=datetime_to_timestamp(data_request_times[0]) if data_request_times else None,
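The streaming variant follows the same pattern. A sketch with placeholders (this path is typically used for payloads too large for a single capture upload; the method name here is hypothetical):

with open("clip.mp4", "rb") as f:
    video_bytes = f.read()

file_id = await data_client.streaming_data_capture_upload(
    data=video_bytes,
    part_id="<PART-ID>",
    file_ext="mp4",
    component_type="camera",
    component_name="my_camera",
    method_name="CaptureAll",  # placeholder method name
    data_request_times=(time_requested, time_received),
    tags=["video"],
    dataset_ids=["<DATASET-ID>"],  # new in this change
)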
@@ -1744,6 +1755,7 @@ async def file_upload(
    method_parameters: Optional[Mapping[str, Any]] = None,
    file_extension: Optional[str] = None,
    tags: Optional[List[str]] = None,
+    dataset_ids: Optional[List[str]] = None,
) -> str:
    """Upload arbitrary file data.
Expand Down Expand Up @@ -1772,6 +1784,7 @@ async def file_upload(
file_extension (Optional[str]): Optional file extension. The empty string ``""`` will be assigned as the file extension if one
isn't provided. Files with a ``.jpeg``, ``.jpg``, or ``.png`` extension will be saved to the **Images** tab.
tags (Optional[List[str]]): Optional list of tags to allow for tag-based filtering when retrieving data.
dataset_ids (Optional[List[str]]): Optional list of dataset IDs to associate with the uploaded data.
Raises:
GRPCError: If an invalid part ID is passed.
@@ -1791,6 +1804,7 @@ async def file_upload(
    method_parameters=method_parameters,
    file_extension=file_extension if file_extension else "",
    tags=tags,
+    dataset_ids=dataset_ids,
)
response: FileUploadResponse = await self._file_upload(metadata=metadata, file_contents=FileData(data=data))
return response.binary_data_id
@@ -1804,6 +1818,7 @@ async def file_upload_from_path(
    method_name: Optional[str] = None,
    method_parameters: Optional[Mapping[str, Any]] = None,
    tags: Optional[List[str]] = None,
+    dataset_ids: Optional[List[str]] = None,
) -> str:
    """Upload arbitrary file data.
@@ -1826,6 +1841,7 @@ async def file_upload_from_path(
    method_name (Optional[str]): Optional name of the method associated with the file.
    method_parameters (Optional[str]): Optional dictionary of the method parameters. No longer in active use.
    tags (Optional[List[str]]): Optional list of tags to allow for tag-based filtering when retrieving data.
+    dataset_ids (Optional[List[str]]): Optional list of dataset IDs to associate with the uploaded data.
Raises:
@@ -1854,6 +1870,7 @@ async def file_upload_from_path(
    method_parameters=method_parameters,
    file_extension=file_extension if file_extension else "",
    tags=tags,
+    dataset_ids=dataset_ids,
)
response: FileUploadResponse = await self._file_upload(metadata=metadata, file_contents=FileData(data=data if data else bytes()))
return response.binary_data_id
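Finally, a combined sketch for the two file-upload entry points (placeholder IDs and paths; `file_upload` takes raw bytes, while `file_upload_from_path` reads the file and derives the extension from its name):

# From raw bytes.
binary_data_id = await data_client.file_upload(
    part_id="<PART-ID>",
    data=b"hello",
    file_name="note",
    file_extension=".txt",
    tags=["docs"],
    dataset_ids=["<DATASET-ID>"],  # new in this change
)

# From a path on disk.
binary_data_id = await data_client.file_upload_from_path(
    filepath="/tmp/robot.log",
    part_id="<PART-ID>",
    tags=["logs"],
    dataset_ids=["<DATASET-ID>"],  # new in this change
)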
8 changes: 8 additions & 0 deletions tests/test_data_sync_client.py
@@ -19,6 +19,7 @@
SECONDS_END = 1689256810
NANOS_END = 10
TAGS = ["tag"]
+DATASET_IDS = ["dataset_id"]
BINARY_DATA = b"binary_data"
METHOD_NAME = "method_name"
DATETIMES = (datetime.now(), datetime.now())
@@ -52,6 +53,7 @@ async def test_binary_data_capture_upload(self, service: MockDataSync):
    data_request_times=DATETIMES,
    binary_data=BINARY_DATA,
    file_extension=".txt",
+    dataset_ids=DATASET_IDS,
)
self.assert_sensor_contents(sensor_contents=list(service.sensor_contents), is_binary=True)
self.assert_metadata(metadata=service.metadata)
@@ -69,6 +71,7 @@ async def test_binary_data_capture_upload(self, service: MockDataSync):
    data_request_times=DATETIMES,
    binary_data=BINARY_DATA,
    file_extension="txt",
+    dataset_ids=DATASET_IDS,
)
assert service.metadata.file_extension == ".txt"

@@ -84,6 +87,7 @@ async def test_tabular_data_capture_upload(self, service: MockDataSync):
    tags=TAGS,
    data_request_times=[DATETIMES],
    tabular_data=cast(List[Mapping[str, Any]], TABULAR_DATA),
+    dataset_ids=DATASET_IDS,
)
self.assert_sensor_contents(sensor_contents=list(service.sensor_contents), is_binary=False)
self.assert_metadata(metadata=service.metadata)
@@ -102,6 +106,7 @@ async def test_file_upload(self, service: MockDataSync):
    file_extension=FILE_EXT,
    tags=TAGS,
    data=BINARY_DATA,
+    dataset_ids=DATASET_IDS,
)
assert file_id == FILE_UPLOAD_RESPONSE
self.assert_metadata(service.metadata)
@@ -122,6 +127,7 @@ async def test_file_upload_from_path(self, service: MockDataSync, tmp_path):
    method_parameters=METHOD_PARAMETERS,
    tags=TAGS,
    filepath=path.resolve(),
+    dataset_ids=DATASET_IDS,
)
assert file_id == FILE_UPLOAD_RESPONSE
self.assert_metadata(service.metadata)
@@ -142,6 +148,7 @@ async def test_streaming_data_capture_upload(self, service: MockDataSync):
    method_parameters=METHOD_PARAMETERS,
    data_request_times=DATETIMES,
    tags=TAGS,
+    dataset_ids=DATASET_IDS,
)
assert file_id == FILE_UPLOAD_RESPONSE
self.assert_metadata(service.metadata)
@@ -166,3 +173,4 @@ def assert_metadata(self, metadata: UploadMetadata) -> None:
    assert metadata.method_name == METHOD_NAME
    assert metadata.method_parameters == METHOD_PARAMETERS
    assert metadata.tags == TAGS
+    assert metadata.dataset_ids == DATASET_IDS