|
11 | 11 | import numpy as np
|
12 | 12 | import pandas as pd
|
13 | 13 | from dateutil.parser import parse as dtparser
|
14 |
| -import urllib.parse |
15 | 14 |
|
16 | 15 |
|
17 | 16 | def list_workspaces(
|
def _resolve_workspace_name_and_id(
    workspace: Optional[str | UUID] = None,
) -> Tuple[str, UUID]:
    """
    Resolve a workspace reference to its (name, id) pair.

    Parameters
    ----------
    workspace : str | UUID, default=None
        The Fabric workspace name or id.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    Tuple[str, UUID]
        The workspace name and the workspace id.

    Raises
    ------
    ValueError
        If the specified workspace cannot be found.
    """

    if workspace is None:
        # No workspace given: fall back to the workspace the notebook runs in.
        workspace_id = fabric.get_workspace_id()
        workspace_name = fabric.resolve_workspace_name(workspace_id)
    else:
        dfW = list_workspaces(workspace=workspace)
        # list_workspaces filters by name or id; an empty frame means no match.
        if dfW.empty:
            raise ValueError(
                f"{icons.red_dot} The '{workspace}' workspace was not found."
            )
        workspace_name = dfW["Name"].iloc[0]
        workspace_id = dfW["Id"].iloc[0]

    return workspace_name, workspace_id
|
912 | 917 |
|
@@ -988,3 +993,60 @@ def list_reports(
|
988 | 993 | df["Modified Date"] = pd.to_datetime(df["Modified Date"], errors="coerce")
|
989 | 994 |
|
990 | 995 | return df
|
def get_capacity_assignment_status(
    workspace: Optional[str | UUID] = None,
) -> pd.DataFrame:
    """
    Gets the status of the assignment-to-capacity operation for the specified workspace.

    This is a wrapper function for the following API: `Capacities - Groups CapacityAssignmentStatus <https://learn.microsoft.com/rest/api/power-bi/capacities/groups-capacity-assignment-status>`_.

    Parameters
    ----------
    workspace : str | UUID, default=None
        The Fabric workspace name or id.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the status of the assignment-to-capacity operation for the specified workspace.
    """

    # Resolving also validates that the workspace exists (raises ValueError otherwise);
    # only the id is needed for the REST call.
    (_, workspace_id) = _resolve_workspace_name_and_id(workspace)

    client = fabric.FabricRestClient()
    response = client.get(f"/v1.0/myorg/groups/{workspace_id}/CapacityAssignmentStatus")

    if response.status_code != 200:
        raise FabricHTTPException(response)

    v = response.json()
    # Resolve the raw capacity id into a (name, id) pair for a readable result row.
    (capacity_name, capacity_id) = _resolve_capacity_name_and_id(
        capacity=v.get("capacityId")
    )

    # Build the single-row frame directly with an explicit column order instead of
    # concatenating onto an empty typed DataFrame (pd.concat with an empty/all-NA
    # frame is deprecated and emits a FutureWarning in recent pandas).
    df = pd.DataFrame(
        [
            {
                "Status": v.get("status"),
                "Activity Id": v.get("activityId"),
                "Start Time": v.get("startTime"),
                "End Time": v.get("endTime"),
                "Capacity Id": capacity_id,
                "Capacity Name": capacity_name,
            }
        ],
        columns=[
            "Status",
            "Activity Id",
            "Start Time",
            "End Time",
            "Capacity Id",
            "Capacity Name",
        ],
    )

    return df
0 commit comments