@@ -151,7 +151,7 @@ def is_dense(self, chunk_byte_length: int) -> bool:

         # Are all non-empty offsets unique?
         if len(
-            set(offset for offset, _ in sorted_offsets_and_lengths if offset != MAX_UINT_64)
+            {offset for offset, _ in sorted_offsets_and_lengths if offset != MAX_UINT_64}
         ) != len(sorted_offsets_and_lengths):
             return False

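This hunk (and the matching one in `_decode_partial_single` further down) replaces `set(<generator expression>)` with a set comprehension. The result is identical; the comprehension is simply the more idiomatic spelling and avoids the `set` name lookup plus the intermediate generator object. A minimal sketch of the idiom, using made-up data rather than anything from the codebase:

```python
# Illustrative data only; MAX_UINT_64 mirrors the sentinel used in the hunk above.
MAX_UINT_64 = 2**64 - 1
sorted_offsets_and_lengths = [(0, 10), (10, 5), (MAX_UINT_64, 0), (15, 3)]

# Before: a generator expression is built, then handed to the set() constructor.
unique_before = set(o for o, _ in sorted_offsets_and_lengths if o != MAX_UINT_64)

# After: a set comprehension builds the same set directly.
unique_after = {o for o, _ in sorted_offsets_and_lengths if o != MAX_UINT_64}

assert unique_before == unique_after == {0, 10, 15}
```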
@@ -380,8 +380,8 @@ def to_dict(self) -> dict[str, JSON]:
             "name": "sharding_indexed",
             "configuration": {
                 "chunk_shape": self.chunk_shape,
-                "codecs": tuple([s.to_dict() for s in self.codecs]),
-                "index_codecs": tuple([s.to_dict() for s in self.index_codecs]),
+                "codecs": tuple(s.to_dict() for s in self.codecs),
+                "index_codecs": tuple(s.to_dict() for s in self.index_codecs),
                 "index_location": self.index_location.value,
             },
         }
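Here the throwaway list comprehensions inside `tuple(...)` become generator expressions, so `tuple()` consumes the items directly instead of copying an intermediate list. A small self-contained sketch of the pattern; `_FakeCodec` and the codec names below are placeholders, not types from the codebase:

```python
class _FakeCodec:
    """Stand-in for a codec object, just to make the sketch runnable."""

    def __init__(self, name: str) -> None:
        self.name = name

    def to_dict(self) -> dict[str, str]:
        return {"name": self.name}


codecs = [_FakeCodec("bytes"), _FakeCodec("crc32c")]

# Before: the list comprehension materializes a temporary list, which tuple() then copies.
as_tuple_before = tuple([c.to_dict() for c in codecs])

# After: tuple() consumes the generator expression directly; no intermediate list is created.
as_tuple_after = tuple(c.to_dict() for c in codecs)

assert as_tuple_before == as_tuple_after
```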
@@ -477,7 +477,7 @@ async def _decode_partial_single(
         )

         indexed_chunks = list(indexer)
-        all_chunk_coords = set(chunk_coords for chunk_coords, _, _ in indexed_chunks)
+        all_chunk_coords = {chunk_coords for chunk_coords, _, _ in indexed_chunks}

         # reading bytes of all requested chunks
         shard_dict: ShardMapping = {}