Skip to content

Commit df9c04f

Browse files
author
Austin Zielman
committed
Version 1.2.4
1 parent 9f18560 commit df9c04f

File tree

306 files changed

+3001
-1502
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

306 files changed

+3001
-1502
lines changed

abacusai/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,4 +4,4 @@
44
from .streaming_client import StreamingClient
55

66

7-
__version__ = "1.2.2"
7+
__version__ = "1.2.4"

abacusai/agent.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,7 @@ class Agent(AbstractApiClass):
2323
agentExecutionConfig (dict): The config for arguments used to execute the agent.
2424
latestAgentVersion (AgentVersion): The latest agent version.
2525
codeSource (CodeSource): If a python model, information on the source code
26+
workflowGraph (WorkflowGraph): The workflow graph for the agent.
2627
"""
2728

2829
def __init__(self, client, name=None, agentId=None, createdAt=None, projectId=None, notebookId=None, predictFunctionName=None, sourceCode=None, agentConfig=None, memory=None, trainingRequired=None, agentExecutionConfig=None, codeSource={}, latestAgentVersion={}, workflowGraph={}):

abacusai/agent_data_upload_result.py

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,3 @@
1-
from .agent_data_document_info import AgentDataDocumentInfo
21
from .return_class import AbstractApiClass
32

43

@@ -8,12 +7,12 @@ class AgentDataUploadResult(AbstractApiClass):
87
98
Args:
109
client (ApiClient): An authenticated API Client instance
11-
docInfos (AgentDataDocumentInfo): A list of dict for information on the documents uploaded to agent.
10+
docInfos (list[AgentDataDocumentInfo]): A list of dicts with information on the documents uploaded to the agent.
1211
"""
1312

14-
def __init__(self, client, docInfos={}):
13+
def __init__(self, client, docInfos=None):
1514
super().__init__(client, None)
16-
self.doc_infos = client._build_class(AgentDataDocumentInfo, docInfos)
15+
self.doc_infos = docInfos
1716
self.deprecated_keys = {}
1817

1918
def __repr__(self):
@@ -30,5 +29,5 @@ def to_dict(self):
3029
Returns:
3130
dict: The dict value representation of the class parameters
3231
"""
33-
resp = {'doc_infos': self._get_attribute_as_dict(self.doc_infos)}
32+
resp = {'doc_infos': self.doc_infos}
3433
return {key: value for key, value in resp.items() if value is not None and key not in self.deprecated_keys}

abacusai/agent_version.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@ class AgentVersion(AbstractApiClass):
2020
error (str): Relevant error if the status is FAILED.
2121
agentExecutionConfig (dict): The config for arguments used to execute the agent.
2222
codeSource (CodeSource): If a python model, information on where the source code is located.
23+
workflowGraph (WorkflowGraph): The workflow graph for the agent.
2324
"""
2425

2526
def __init__(self, client, agentVersion=None, status=None, agentId=None, agentConfig=None, publishingStartedAt=None, publishingCompletedAt=None, pendingDeploymentIds=None, failedDeploymentIds=None, error=None, agentExecutionConfig=None, codeSource={}, workflowGraph={}):

abacusai/api_class/abstract.py

Lines changed: 38 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,8 @@
44
import re
55
from abc import ABC
66
from copy import deepcopy
7-
from typing import Any
7+
from textwrap import dedent
8+
from typing import Any, Callable, get_origin, get_type_hints
89

910
from .enums import ApiEnum
1011

@@ -34,6 +35,16 @@ def snake_case(value):
3435
return value
3536

3637

38+
def get_clean_function_source_code(func: Callable):
39+
sample_lambda = (lambda: 0)
40+
if isinstance(func, type(sample_lambda)) and func.__name__ == sample_lambda.__name__:
41+
raise ValueError('Lambda function not allowed.')
42+
source_code = inspect.getsource(func)
43+
# If function source code has some initial indentation, remove it (Ex - can happen if the functor was defined inside a function)
44+
source_code = dedent(source_code)
45+
return source_code
46+
47+
3748
@dataclasses.dataclass
3849
class ApiClass(ABC):
3950
_upper_snake_case_keys: bool = dataclasses.field(default=False, repr=False, init=False)
@@ -111,12 +122,12 @@ def to_dict_helper(api_class_obj):
111122
res = {}
112123
api_class_dict = vars(api_class_obj)
113124
if self._support_kwargs:
114-
kwargs = api_class_dict.pop('kwargs', None)
125+
kwargs = api_class_dict.get('kwargs', None)
115126
api_class_dict.update(kwargs or {})
116127
for k, v in api_class_dict.items():
117-
if not k.startswith('__'):
118-
k = upper_snake_case(k) if self._upper_snake_case_keys else camel_case(k)
119-
if v is not None:
128+
if v is not None and k != 'kwargs':
129+
if not k.startswith('__'):
130+
k = upper_snake_case(k) if self._upper_snake_case_keys else camel_case(k)
120131
if isinstance(v, ApiClass):
121132
res[k] = to_dict_helper(v)
122133
elif isinstance(v, list):
@@ -125,17 +136,19 @@ def to_dict_helper(api_class_obj):
125136
res[k] = {key: to_dict_helper(val) if isinstance(val, ApiClass) else val for key, val in v.items()}
126137
elif isinstance(v, datetime.datetime) or isinstance(v, datetime.date):
127138
res[k] = v.isoformat() if v else v
139+
elif isinstance(v, ApiEnum):
140+
res[k] = v.value
128141
else:
129-
if isinstance(v, ApiEnum):
130-
res[k] = v.value
131-
else:
132-
res[k] = v
142+
res[k] = v
133143
return res
134144

135145
return to_dict_helper(self)
136146

137147
@classmethod
138148
def from_dict(cls, input_dict: dict):
149+
if input_dict is None:
150+
return None
151+
obj = None
139152
if input_dict:
140153
if builder := cls._get_builder():
141154
config_class_key = None
@@ -152,14 +165,28 @@ def from_dict(cls, input_dict: dict):
152165
if config_class_key not in input_dict_with_config_key and camel_case(config_class_key) not in input_dict_with_config_key:
153166
input_dict_with_config_key[config_class_key] = value
154167

155-
return builder.from_dict(input_dict_with_config_key)
168+
obj = builder.from_dict(input_dict_with_config_key)
169+
156170
if not cls._upper_snake_case_keys:
157171
input_dict = {snake_case(k): v for k, v in input_dict.items()}
158172
if not cls._support_kwargs:
159173
# only use keys that are valid fields in the ApiClass
160174
field_names = set((field.name) for field in dataclasses.fields(cls))
161175
input_dict = {k: v for k, v in input_dict.items() if k in field_names}
162-
return cls(**input_dict)
176+
if obj is None:
177+
obj = cls(**input_dict)
178+
179+
for attr_name, attr_type in get_type_hints(cls).items():
180+
if attr_name in input_dict and inspect.isclass(attr_type) and issubclass(attr_type, ApiClass):
181+
setattr(obj, attr_name, attr_type.from_dict(input_dict[attr_name]))
182+
elif attr_name in input_dict and get_origin(attr_type) is list and attr_type.__args__ and inspect.isclass(attr_type.__args__[0]) and issubclass(attr_type.__args__[0], ApiClass):
183+
class_type = attr_type.__args__[0]
184+
if isinstance(input_dict[attr_name], list):
185+
setattr(obj, attr_name, [class_type.from_dict(item) for item in input_dict[attr_name]])
186+
else:
187+
raise ValueError(f'Expected list for {attr_name} but got {type(input_dict[attr_name])}')
188+
189+
return obj
163190

164191

165192
@dataclasses.dataclass

abacusai/api_class/ai_agents.py

Lines changed: 15 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
from typing import List, Union
33

44
from . import enums
5-
from .abstract import ApiClass
5+
from .abstract import ApiClass, get_clean_function_source_code
66

77

88
@dataclasses.dataclass
@@ -14,7 +14,7 @@ class FieldDescriptor(ApiClass):
1414
field (str): The field to be extracted. This will be used as the key in the response.
1515
description (str): The description of this field. If not included, the response_field will be used.
1616
example_extraction (Union[str, int, bool, float]): An example of this extracted field.
17-
type (enums.FieldDescriptorType): The type of this field. If not provided, the default type is STRING.
17+
type (FieldDescriptorType): The type of this field. If not provided, the default type is STRING.
1818
"""
1919
field: str = dataclasses.field()
2020
description: str = dataclasses.field(default=None)
@@ -29,20 +29,23 @@ class WorkflowNodeInputMapping(ApiClass):
2929
3030
Args:
3131
name (str): The name of the input.
32-
variable_type (str): The type of the input.
33-
workflow_variable_source (str): The workflow source stage of the input.
32+
variable_type (WorkflowNodeInputType): The type of the input.
33+
variable_source (str): The name of the node this variable is sourced from.
34+
If the type is `WORKFLOW_VARIABLE`, the value given by the source node will be directly used.
35+
If the type is `USER_INPUT`, the value given by the source node will be used as the default initial value before user edits it.
36+
Set to `None` if the type is `USER_INPUT` and the variable doesn't need a pre-filled initial value.
3437
is_required (bool): Whether the input is required.
3538
"""
3639
name: str
3740
variable_type: enums.WorkflowNodeInputType
38-
workflow_variable_source: str = dataclasses.field(default=None)
41+
variable_source: str = dataclasses.field(default=None)
3942
is_required: bool = dataclasses.field(default=True)
4043

4144
def to_dict(self):
4245
return {
4346
'name': self.name,
4447
'variable_type': self.variable_type,
45-
'workflow_variable_source': self.workflow_variable_source,
48+
'variable_source': self.variable_source,
4649
'is_required': self.is_required
4750
}
4851

@@ -54,7 +57,7 @@ class WorkflowNodeOutputMapping(ApiClass):
5457
5558
Args:
5659
name (str): The name of the output.
57-
variable_type (str): The type of the output.
60+
variable_type (WorkflowNodeOutputType): The type of the output.
5861
"""
5962
name: str
6063
variable_type: enums.WorkflowNodeOutputType = dataclasses.field(default=enums.WorkflowNodeOutputType.STRING)
@@ -72,22 +75,19 @@ class WorkflowGraphNode(ApiClass):
7275
A node in an Agent workflow graph.
7376
7477
Args:
75-
name (str): Display name of the worflow node.
78+
name (str): A unique name for the workflow node.
7679
input_mappings (List[WorkflowNodeInputMapping]): List of input mappings for the node.
7780
output_mappings (List[WorkflowNodeOutputMapping]): List of output mappings for the node.
78-
function (callable): The callable node function reference if available.
79-
function_name (str): The name of the function if available.
80-
source_code (str): The source code of the function if available.
81+
function (callable): The callable node function reference.
8182
input_schema (dict): The react json schema for the input form if applicable.
8283
output_schema (dict): The react json schema for the output if applicable.
8384
package_requirements (list): List of package requirements for the node.
8485
"""
8586

8687
def __init__(self, name: str, input_mappings: List[WorkflowNodeInputMapping], output_mappings: List[WorkflowNodeOutputMapping], function: callable = None, function_name: str = None, source_code: str = None, input_schema: dict = None, output_schema: dict = None, package_requirements: list = None):
8788
if function:
88-
import inspect
8989
self.function_name = function.__name__
90-
self.source_code = inspect.getsource(function)
90+
self.source_code = get_clean_function_source_code(function)
9191
elif function_name and source_code:
9292
self.function_name = function_name
9393
self.source_code = source_code
@@ -133,8 +133,8 @@ class WorkflowGraphEdge(ApiClass):
133133
An edge in an Agent workflow graph.
134134
135135
Args:
136-
source (str): The source node of the edge.
137-
target (str): The target node of the edge.
136+
source (str): The name of the source node of the edge.
137+
target (str): The name of the target node of the edge.
138138
details (dict): Additional details about the edge.
139139
"""
140140
source: str

abacusai/api_class/batch_prediction.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@ class ForecastingBatchPredictionArgs(BatchPredictionArgs):
3232
forecasting_horizon (int): The number of timestamps to predict in the future. Range: [1, 1000].
3333
item_attributes_to_include_in_the_result (list): List of columns to include in the prediction output.
3434
explain_predictions (bool): If True, calculates explanations for the forecasted values along with predictions.
35-
automate_monitoring (bool): Controls whether to automatically create a monitor to calculate the drift each time the batch prediction is run. Defaults to true if not specified.
35+
create_monitor (bool): Controls whether to automatically create a monitor to calculate the drift each time the batch prediction is run. Defaults to true if not specified.
3636
"""
3737
for_eval: bool = dataclasses.field(default=None)
3838
predictions_start_date: str = dataclasses.field(default=None)
@@ -41,7 +41,7 @@ class ForecastingBatchPredictionArgs(BatchPredictionArgs):
4141
forecasting_horizon: int = dataclasses.field(default=None)
4242
item_attributes_to_include_in_the_result: list = dataclasses.field(default=None)
4343
explain_predictions: bool = dataclasses.field(default=None)
44-
automate_monitoring: bool = dataclasses.field(default=None)
44+
create_monitor: bool = dataclasses.field(default=None)
4545

4646
def __post_init__(self):
4747
self.problem_type = enums.ProblemType.FORECASTING
@@ -100,7 +100,7 @@ class PredictiveModelingBatchPredictionArgs(BatchPredictionArgs):
100100
explanation_filter_label (str): For classification problems specifies the label to which the explanation bounds are applied.
101101
output_columns (list): A list of column names to include in the prediction result.
102102
explain_predictions (bool): If True, calculates explanations for the predicted values along with predictions.
103-
automate_monitoring (bool): Controls whether to automatically create a monitor to calculate the drift each time the batch prediction is run. Defaults to true if not specified.
103+
create_monitor (bool): Controls whether to automatically create a monitor to calculate the drift each time the batch prediction is run. Defaults to true if not specified.
104104
"""
105105
for_eval: bool = dataclasses.field(default=None)
106106
explainer_type: enums.ExplainerType = dataclasses.field(default=None)
@@ -113,7 +113,7 @@ class PredictiveModelingBatchPredictionArgs(BatchPredictionArgs):
113113
explanation_filter_label: str = dataclasses.field(default=None)
114114
output_columns: list = dataclasses.field(default=None)
115115
explain_predictions: bool = dataclasses.field(default=None)
116-
automate_monitoring: bool = dataclasses.field(default=None)
116+
create_monitor: bool = dataclasses.field(default=None)
117117

118118
def __post_init__(self):
119119
self.problem_type = enums.ProblemType.PREDICTIVE_MODELING
@@ -194,10 +194,10 @@ class TrainablePlugAndPlayBatchPredictionArgs(BatchPredictionArgs):
194194
195195
Args:
196196
for_eval (bool): If True, the test fold which was created during training and used for metrics calculation will be used as input data. These predictions are hence, used for model evaluation.
197-
automate_monitoring (bool): Controls whether to automatically create a monitor to calculate the drift each time the batch prediction is run. Defaults to true if not specified.
197+
create_monitor (bool): Controls whether to automatically create a monitor to calculate the drift each time the batch prediction is run. Defaults to true if not specified.
198198
"""
199199
for_eval: bool = dataclasses.field(default=None)
200-
automate_monitoring: bool = dataclasses.field(default=None)
200+
create_monitor: bool = dataclasses.field(default=None)
201201

202202
def __post_init__(self):
203203
self.problem_type = enums.ProblemType.CUSTOM_ALGORITHM

abacusai/api_class/dataset_application_connector.py

Lines changed: 12 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -8,8 +8,13 @@
88
class DatasetConfig(ApiClass):
99
"""
1010
An abstract class for dataset configs specific to application connectors.
11+
12+
Args:
13+
application_connector_type (enums.ApplicationConnectorType): The type of application connector
14+
is_documentset (bool): Whether the dataset is a document set
1115
"""
1216
application_connector_type: enums.ApplicationConnectorType = dataclasses.field(default=None, repr=False, init=False)
17+
is_documentset: bool = dataclasses.field(default=None)
1318

1419
@classmethod
1520
def _get_builder(cls):
@@ -21,12 +26,16 @@ class ConfluenceDatasetConfig(DatasetConfig):
2126
"""
2227
Dataset config for Confluence Application Connector
2328
Args:
29+
location (str): The location of the pages to fetch
2430
pull_attachments (bool, optional): Whether to pull attachments for each page
2531
space_key (str, optional): The space key to fetch pages from
32+
extract_bounding_boxes (bool, optional): Whether to extract bounding boxes from the documents
2633
2734
"""
35+
location: str = dataclasses.field(default=None)
2836
pull_attachments: bool = dataclasses.field(default=False)
2937
space_key: str = dataclasses.field(default=None)
38+
extract_bounding_boxes: bool = dataclasses.field(default=False)
3039

3140
def __post_init__(self):
3241
self.application_connector_type = enums.ApplicationConnectorType.CONFLUENCE
@@ -57,13 +66,11 @@ class GoogleDriveDatasetConfig(DatasetConfig):
5766
5867
Args:
5968
location (str): The regex location of the files to fetch
60-
is_documentset (bool): Whether the dataset is a document set
6169
csv_delimiter (str, optional): If the file format is CSV, use a specific csv delimiter
6270
extract_bounding_boxes (bool, optional): Signifies whether to extract bounding boxes out of the documents. Only valid if is_documentset is True
6371
merge_file_schemas (bool, optional): Signifies if the merge file schema policy is enabled. Not applicable if is_documentset is True
6472
"""
6573
location: str = dataclasses.field(default=None)
66-
is_documentset: bool = dataclasses.field(default=None)
6774
csv_delimiter: str = dataclasses.field(default=None)
6875
extract_bounding_boxes: bool = dataclasses.field(default=False)
6976
merge_file_schemas: bool = dataclasses.field(default=False)
@@ -99,13 +106,11 @@ class OneDriveDatasetConfig(DatasetConfig):
99106
100107
Args:
101108
location (str): The regex location of the files to fetch
102-
is_documentset (bool): Whether the dataset is a document set
103109
csv_delimiter (str, optional): If the file format is CSV, use a specific csv delimiter
104110
extract_bounding_boxes (bool, optional): Signifies whether to extract bounding boxes out of the documents. Only valid if is_documentset is True
105111
merge_file_schemas (bool, optional): Signifies if the merge file schema policy is enabled. Not applicable if is_documentset is True
106112
"""
107113
location: str = dataclasses.field(default=None)
108-
is_documentset: bool = dataclasses.field(default=None)
109114
csv_delimiter: str = dataclasses.field(default=None)
110115
extract_bounding_boxes: bool = dataclasses.field(default=False)
111116
merge_file_schemas: bool = dataclasses.field(default=False)
@@ -127,7 +132,6 @@ class SharepointDatasetConfig(DatasetConfig):
127132
merge_file_schemas (bool, optional): Signifies if the merge file schema policy is enabled. Not applicable if is_documentset is True
128133
"""
129134
location: str = dataclasses.field(default=None)
130-
is_documentset: bool = dataclasses.field(default=None)
131135
csv_delimiter: str = dataclasses.field(default=None)
132136
extract_bounding_boxes: bool = dataclasses.field(default=False)
133137
merge_file_schemas: bool = dataclasses.field(default=False)
@@ -152,11 +156,14 @@ class AbacusUsageMetricsDatasetConfig(DatasetConfig):
152156
153157
Args:
154158
include_entire_conversation_history (bool): Whether to show the entire history for this deployment conversation
159+
include_all_feedback (bool): Whether to include all feedback for this deployment conversation
155160
"""
156161
include_entire_conversation_history: bool = dataclasses.field(default=False)
162+
include_all_feedback: bool = dataclasses.field(default=False)
157163

158164
def __post_init__(self):
159165
self.application_connector_type = enums.ApplicationConnectorType.ABACUSUSAGEMETRICS
166+
self.is_documentset = False
160167

161168

162169
@dataclasses.dataclass

0 commit comments

Comments
 (0)