2 changes: 1 addition & 1 deletion setup.py
@@ -10,7 +10,7 @@

 setuptools.setup(
     name="DroidRpc",
-    version="1.3.4",
+    version="1.4.2",
     description="Python client for connecting to LORA Technologies' bot services.",
     long_description=long_description,
     long_description_content_type="text/markdown",
16 changes: 9 additions & 7 deletions src/DroidRpc/client.py
@@ -32,14 +32,19 @@ class Client:

     batch_size = 400
 
-    def __init__(self, address: str = "guardian", port: str = "50065"):
+    def __init__(self,
+                 address: str = "guardian",
+                 port: str = "50065",
+                 batch_size: int = None):
Contributor commented on lines 33 to +38:

Why not put batch_size: int = 400 instead of None? Then you don't need the if statement below
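
For reference, a minimal sketch of the variant the reviewer is suggesting (hypothetical, not the merged code): make 400 the keyword default so the fallback check disappears.

    # Hypothetical alternative to the merged __init__ (reviewer's suggestion):
    # a concrete keyword default removes the need for the None check below.
    def __init__(self,
                 address: str = "guardian",
                 port: str = "50065",
                 batch_size: int = 400):
        self.batch_size = batch_size
        # ... rest of __init__ as in the diff (address, port, channel, stub) ...

The merged version instead keeps the class-level batch_size = 400 as the default and only overrides the instance attribute when a value is passed; for typical callers both approaches behave the same.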

         self.address = address
         self.port = port
         # TODO: Use a secure channel because this is external facing
         self.channel = grpc.insecure_channel(self.address + ":" + self.port)
         self.droid = bot_pb2_grpc.DroidStub(
             self.channel
         ) # This one contains the bistream
+        if batch_size is not None:
+            self.batch_size = batch_size
Contributor @WilliamG-LORA commented on lines +46 to +47 (Oct 11, 2022):

This is not needed if you set the default to 400 in the init
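
As merged, a caller-side usage sketch of the new parameter (the import path is assumed from the src/DroidRpc/client.py layout; the defaults come from the diff):

    from DroidRpc.client import Client  # assumed import path

    client = Client()               # uses the class-level default of 400
    small = Client(batch_size=200)  # generators split work into chunks of at most 200 columns

The override only matters for the three private generators further down, which now size their batches from self.batch_size instead of a hard-coded 400.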


     def __string_to_datetime(self, date: str):
         date = datetime.strptime(date, "%Y-%m-%d")
@@ -85,8 +90,7 @@ def __create_bots_generator(self, input_matrix: np.ndarray):
"""

# Split input matrix into smaller batches
batch_size = 400
splits = math.ceil(input_matrix.shape[1] / batch_size)
splits = math.ceil(input_matrix.shape[1] / self.batch_size)
input_matrix = np.array_split(input_matrix, splits, axis=1)

for batch in input_matrix:
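
In isolation, the splitting logic these generators share looks like the sketch below (standalone, with an illustrative 5×1000 matrix; the real code reads self.batch_size on the Client instance):

    import math
    import numpy as np

    batch_size = 400                    # illustrative; the client reads self.batch_size
    input_matrix = np.zeros((5, 1000))  # hypothetical shape: 5 rows, 1000 columns of bot inputs

    # Enough splits that no batch exceeds batch_size columns
    splits = math.ceil(input_matrix.shape[1] / batch_size)  # ceil(1000 / 400) = 3
    batches = np.array_split(input_matrix, splits, axis=1)  # column counts: 334, 333, 333

    for batch in batches:
        pass  # the collapsed loop body builds one request per batch (not shown in the diff)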
@@ -296,8 +300,7 @@ def __hedge_bots_generator(self, input_matrix: np.array):
"""

# Split input matrix into smaller batches
batch_size = 400
splits = math.ceil(input_matrix.shape[1] / batch_size)
splits = math.ceil(input_matrix.shape[1] / self.batch_size)
input_matrix = np.array_split(input_matrix, splits, axis=1)

for batch in input_matrix:
@@ -455,8 +458,7 @@ def __stop_bots_generator(
         self,
         input_matrix: np.ndarray,
     ):
-        batch_size = 400
-        splits = math.ceil(input_matrix.shape[1] / batch_size)
+        splits = math.ceil(input_matrix.shape[1] / self.batch_size)
         input_matrix = np.array_split(input_matrix, splits, axis=1)
 
         for batch in input_matrix: