Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ build-backend = "hatchling.build"

[project]
name = "socketdev"
version = "3.0.4"
version = "3.0.5"
requires-python = ">= 3.9"
dependencies = [
'requests',
Expand Down
43 changes: 40 additions & 3 deletions socketdev/apitokens/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,20 +28,57 @@ def create(self, org_slug: str, **kwargs) -> dict:
log.error(response.text)
return {}

def update(self, org_slug: str, **kwargs) -> dict:
def list(self, org_slug: str, **kwargs) -> dict:
    """
    List API tokens for an organization.

    Args:
        org_slug: Organization slug
        **kwargs: Optional query parameters appended to the request URL
                  (e.g. paging/filtering options)

    Returns:
        dict: API response containing the list of tokens, or {} on error
    """
    path = f"orgs/{org_slug}/api-tokens"
    # kwargs already is the query-parameter mapping; no need to copy it
    # into an intermediate dict before encoding.
    if kwargs:
        from urllib.parse import urlencode
        path += "?" + urlencode(kwargs)
    response = self.api.do_request(path=path, method="GET")
    if response.status_code == 200:
        return response.json()
    log.error(f"Error listing API tokens: {response.status_code}")
    log.error(response.text)
    return {}

def update(self, org_slug: str, token_id: str = None, **kwargs) -> dict:
"""
Update an API token.

Args:
org_slug: Organization slug
token_id: Token ID to update (optional, can be in kwargs)
**kwargs: Token update parameters

Returns:
dict: API response containing the updated token details
"""
path = f"orgs/{org_slug}/api-tokens/update"
# Extract token_id from kwargs if not provided as parameter
if token_id is None and 'token_id' in kwargs:
token_id = kwargs.pop('token_id')

if token_id:
path = f"orgs/{org_slug}/api-tokens/{token_id}"
method = "PUT"
else:
path = f"orgs/{org_slug}/api-tokens/update"
method = "POST"

payload = json.dumps(kwargs) if kwargs else "{}"
response = self.api.do_request(path=path, method="POST", payload=payload)
response = self.api.do_request(path=path, method=method, payload=payload)
if response.status_code == 200:
return response.json()
log.error(f"Error updating API token: {response.status_code}")
Expand Down
5 changes: 4 additions & 1 deletion socketdev/auditlog/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,10 @@ def get(self, org_slug: str, **kwargs) -> dict:
dict: API response containing audit log entries
"""
path = f"orgs/{org_slug}/audit-log"
response = self.api.do_request(path=path, params=kwargs)
if kwargs:
from urllib.parse import urlencode
path += "?" + urlencode(kwargs)
response = self.api.do_request(path=path)
if response.status_code == 200:
return response.json()
log.error(f"Error getting audit log: {response.status_code}")
Expand Down
61 changes: 28 additions & 33 deletions socketdev/core/dedupe.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,40 +88,35 @@ def alert_identity(alert: dict) -> tuple:

@staticmethod
def dedupe(packages: List[Dict[str, Any]], batched: bool = True) -> List[Dict[str, Any]]:
    """
    Deduplicate package records by grouping on their input purl and merging
    each group's alerts into a single consolidated record.

    Args:
        packages: Package dicts from the API (a nested list is tolerated by
                  the grouping helper, which flattens it).
        batched: Unused; retained only for backward compatibility with
                 callers of the previous batch-index grouping strategy.

    Returns:
        One consolidated dict per distinct input purl, with the internal
        "batchIndex" bookkeeping key stripped from each result.
    """
    # Always group by inputPurl now; `batched` is kept for compatibility.
    grouped = Dedupe.consolidate_by_input_purl(packages)
    results = []
    for group in grouped.values():
        merged = Dedupe.consolidate_and_merge_alerts(group)
        # batchIndex is internal bookkeeping — never expose it to callers.
        merged.pop("batchIndex", None)
        results.append(merged)
    return results

@staticmethod
def consolidate_by_batch_index(packages: List[Dict[str, Any]]) -> dict[int, list[dict[str, Any]]]:
grouped: Dict[int, List[Dict[str, Any]]] = defaultdict(list)
def consolidate_by_input_purl(packages: List[Dict[str, Any]]) -> dict[str, list[dict[str, Any]]]:
"""Group packages by their inputPurl field"""
grouped: Dict[str, List[Dict[str, Any]]] = defaultdict(list)

# Handle both list of packages and nested structure
if packages and isinstance(packages[0], list):
# If we get a nested list, flatten it
flat_packages = []
for sublist in packages:
if isinstance(sublist, list):
flat_packages.extend(sublist)
else:
flat_packages.append(sublist)
packages = flat_packages

for pkg in packages:
grouped[pkg["batchIndex"]].append(pkg)
return grouped

@staticmethod
def consolidate_by_order(packages: List[Dict[str, Any]]) -> dict[int, list[dict[str, Any]]]:
grouped: Dict[int, List[Dict[str, Any]]] = defaultdict(list)
batch_index = 0
package_purl = None
try:
for pkg in packages:
name = pkg["name"]
version = pkg["version"]
namespace = pkg.get("namespace")
ecosystem = pkg.get("type")
new_purl = f"pkg:{ecosystem}/"
if namespace:
new_purl += f"{namespace}/"
new_purl += f"{name}@{version}"
if package_purl is None:
package_purl = new_purl
if package_purl != new_purl:
batch_index += 1
pkg["batchIndex"] = batch_index
grouped[pkg["batchIndex"]].append(pkg)
except Exception as error:
log.error(error)
# inputPurl should always exist now, fallback to purl if not found
group_key = pkg.get("inputPurl", pkg.get("purl", str(hash(str(pkg)))))
grouped[group_key].append(pkg)
return grouped
27 changes: 16 additions & 11 deletions socketdev/dependencies/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,9 +13,9 @@ class Dependencies:
def __init__(self, api):
self.api = api

def post(self, files: list, params: dict, use_lazy_loading: bool = False, workspace: str = None) -> dict:
def post(self, files: list, params: dict, use_lazy_loading: bool = True, workspace: str = None, base_path: str = None) -> dict:
if use_lazy_loading:
loaded_files = Utils.load_files_for_sending_lazy(files, workspace)
loaded_files = Utils.load_files_for_sending_lazy(files, workspace, base_path=base_path)
else:
loaded_files = []
loaded_files = load_files(files, loaded_files)
Expand All @@ -30,15 +30,20 @@ def post(self, files: list, params: dict, use_lazy_loading: bool = False, worksp
log.error(response.text)
return result

def get(
self,
limit: int = 50,
offset: int = 0,
) -> dict:
path = "dependencies/search"
payload = {"limit": limit, "offset": offset}
payload_str = json.dumps(payload)
response = self.api.do_request(path=path, method="POST", payload=payload_str)
def get(self, org_slug: str = None, ecosystem: str = None, package: str = None, version: str = None, **kwargs) -> dict:
# If all specific parameters are provided, use the specific dependency endpoint
if org_slug and ecosystem and package and version:
path = f"orgs/{org_slug}/dependencies/{ecosystem}/{package}/{version}"
response = self.api.do_request(path=path, method="GET")
else:
# Otherwise use the search endpoint
limit = kwargs.get('limit', 50)
offset = kwargs.get('offset', 0)
path = "dependencies/search"
payload = {"limit": limit, "offset": offset}
payload_str = json.dumps(payload)
response = self.api.do_request(path=path, method="POST", payload=payload_str)

if response.status_code == 200:
result = response.json()
else:
Expand Down
5 changes: 3 additions & 2 deletions socketdev/diffscans/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ def get(self, org_slug: str, diff_scan_id: str) -> dict:
log.error(f"Error fetching diff scan: {response.status_code}, message: {response.text}")
return {}

def create_from_repo(self, org_slug: str, repo_slug: str, files: list, params: Optional[Dict[str, Any]] = None, use_lazy_loading: bool = False, workspace: str = None, max_open_files: int = 100) -> dict:
def create_from_repo(self, org_slug: str, repo_slug: str, files: list, params: Optional[Dict[str, Any]] = None, use_lazy_loading: bool = False, workspace: str = None, max_open_files: int = 100, base_path: str = None) -> dict:
"""
Create a diff scan from repo HEAD, uploading files as multipart form data.

Expand All @@ -45,6 +45,7 @@ def create_from_repo(self, org_slug: str, repo_slug: str, files: list, params: O
workspace: Base directory path to make file paths relative to
max_open_files: Maximum number of files to keep open simultaneously when using
lazy loading. Useful for systems with low ulimit values (default: 100)
base_path: Optional base path to strip from key names for cleaner file organization

Returns:
dict: API response containing diff scan results
Expand All @@ -63,7 +64,7 @@ def create_from_repo(self, org_slug: str, repo_slug: str, files: list, params: O

# Use lazy loading if requested
if use_lazy_loading:
prepared_files = Utils.load_files_for_sending_lazy(files, workspace, max_open_files)
prepared_files = Utils.load_files_for_sending_lazy(files, workspace, max_open_files, base_path)
else:
prepared_files = files

Expand Down
17 changes: 13 additions & 4 deletions socketdev/fullscans/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -701,8 +701,16 @@ def __init__(self, api):


def get(self, org_slug: str, params: dict, use_types: bool = False) -> Union[dict, GetFullScanMetadataResponse]:
params_arg = urllib.parse.urlencode(params)
path = "orgs/" + org_slug + "/full-scans?" + str(params_arg)
# Check if this is a request for a specific scan by ID
if 'id' in params and len(params) == 1:
# Get specific scan by ID: /orgs/{org_slug}/full-scans/{full_scan_id}
scan_id = params['id']
path = f"orgs/{org_slug}/full-scans/{scan_id}"
else:
# List scans with query parameters: /orgs/{org_slug}/full-scans?params
params_arg = urllib.parse.urlencode(params)
path = "orgs/" + org_slug + "/full-scans?" + str(params_arg)

response = self.api.do_request(path=path)

if response.status_code == 200:
Expand All @@ -720,7 +728,7 @@ def get(self, org_slug: str, params: dict, use_types: bool = False) -> Union[dic
)
return {}

def post(self, files: list, params: FullScanParams, use_types: bool = False, use_lazy_loading: bool = False, workspace: str = None, max_open_files: int = 100) -> Union[dict, CreateFullScanResponse]:
def post(self, files: list, params: FullScanParams, use_types: bool = False, use_lazy_loading: bool = False, workspace: str = None, max_open_files: int = 100, base_path: str = None) -> Union[dict, CreateFullScanResponse]:
"""
Create a new full scan by uploading manifest files.

Expand All @@ -734,6 +742,7 @@ def post(self, files: list, params: FullScanParams, use_types: bool = False, use
workspace: Base directory path to make file paths relative to
max_open_files: Maximum number of files to keep open simultaneously when using
lazy loading. Useful for systems with low ulimit values (default: 100)
base_path: Optional base path to strip from key names for cleaner file organization

Returns:
dict or CreateFullScanResponse: API response containing scan results
Expand All @@ -754,7 +763,7 @@ def post(self, files: list, params: FullScanParams, use_types: bool = False, use

# Use lazy loading if requested
if use_lazy_loading:
prepared_files = Utils.load_files_for_sending_lazy(files, workspace, max_open_files)
prepared_files = Utils.load_files_for_sending_lazy(files, workspace, max_open_files, base_path)
else:
prepared_files = files

Expand Down
2 changes: 1 addition & 1 deletion socketdev/purl/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ def post(self, license: str = "false", components: list = None, **kwargs) -> lis
purl.append(item)
except json.JSONDecodeError:
continue
purl_deduped = Dedupe.dedupe(purl)
purl_deduped = Dedupe.dedupe(purl, batched=True)
return purl_deduped

log.error(f"Error posting {components} to the Purl API: {response.status_code}")
Expand Down
23 changes: 19 additions & 4 deletions socketdev/report/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,14 +58,29 @@ def supported(self) -> dict:
return {}

def create(self, files: list) -> dict:
# Handle both file path strings and file tuples
open_files = []
for name, path in files:
file_info = (name, (name, open(path, "rb"), "text/plain"))
open_files.append(file_info)
for file_entry in files:
if isinstance(file_entry, tuple) and len(file_entry) == 2:
name, file_data = file_entry
if isinstance(file_data, tuple) and len(file_data) == 2:
# Format: [("field_name", ("filename", file_obj))]
filename, file_obj = file_data
file_info = (name, (filename, file_obj, "text/plain"))
open_files.append(file_info)
else:
# Format: [("field_name", "file_path")]
file_info = (name, (name, open(file_data, "rb"), "text/plain"))
open_files.append(file_info)
else:
# Handle other formats if needed
log.error(f"Unexpected file format: {file_entry}")
return {}

path = "report/upload"
payload = {}
response = self.api.do_request(path=path, method="PUT", files=open_files, payload=payload)
if response.status_code == 200:
if response.status_code in (200, 201):
return response.json()
log.error(f"Error creating report: {response.status_code}")
log.error(response.text)
Expand Down
38 changes: 30 additions & 8 deletions socketdev/utils/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -233,7 +233,7 @@ def validate_integration_type(integration_type: str) -> IntegrationType:
return integration_type # type: ignore

@staticmethod
def load_files_for_sending_lazy(files: List[str], workspace: str = None, max_open_files: int = 100) -> List[Tuple[str, Tuple[str, LazyFileLoader]]]:
def load_files_for_sending_lazy(files: List[str], workspace: str = None, max_open_files: int = 100, base_path: str = None) -> List[Tuple[str, Tuple[str, LazyFileLoader]]]:
"""
Prepares files for sending to the Socket API using lazy loading.

Expand All @@ -246,6 +246,7 @@ def load_files_for_sending_lazy(files: List[str], workspace: str = None, max_ope
files: List of file paths from find_files()
workspace: Base directory path to make paths relative to
max_open_files: Maximum number of files to keep open simultaneously (default: 100)
base_path: Optional base path to strip from key names for cleaner file organization

Returns:
List of tuples formatted for requests multipart upload:
Expand All @@ -257,6 +258,8 @@ def load_files_for_sending_lazy(files: List[str], workspace: str = None, max_ope
send_files = []
if workspace and "\\" in workspace:
workspace = workspace.replace("\\", "/")
if base_path and "\\" in base_path:
base_path = base_path.replace("\\", "/")

for file_path in files:
# Normalize file path
Expand All @@ -265,14 +268,33 @@ def load_files_for_sending_lazy(files: List[str], workspace: str = None, max_ope

_, name = file_path.rsplit("/", 1)

# Calculate the key (relative path from workspace)
if workspace and file_path.startswith(workspace):
# Calculate the key name for the form data
key = file_path

# If base_path is provided, strip it from the file path to create the key
if base_path:
# Normalize base_path to ensure consistent handling of trailing slashes
normalized_base_path = base_path.rstrip("/") + "/" if not base_path.endswith("/") else base_path
if key.startswith(normalized_base_path):
key = key[len(normalized_base_path):]
elif key.startswith(base_path.rstrip("/")):
# Handle case where base_path matches exactly without trailing slash
stripped_base = base_path.rstrip("/")
if key.startswith(stripped_base + "/") or key == stripped_base:
key = key[len(stripped_base):]
key = key.lstrip("/")

# If workspace is provided and base_path wasn't used, fall back to workspace logic
elif workspace and file_path.startswith(workspace):
key = file_path[len(workspace):]
else:
key = file_path

key = key.lstrip("/")
key = key.lstrip("./")
key = key.lstrip("/")
key = key.lstrip("./")

# If neither base_path nor workspace matched, clean up the key
if key == file_path:
# No base_path or workspace stripping occurred, clean up leading parts
key = key.lstrip("/")
key = key.lstrip("./")

# Create lazy file loader instead of opening file immediately
# Use the relative path (key) as filename instead of truncated basename
Expand Down
2 changes: 1 addition & 1 deletion socketdev/version.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__version__ = "3.0.4"
__version__ = "3.0.5"
3 changes: 2 additions & 1 deletion tests/integration/test_all_endpoints.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,8 @@ def test_dependencies_post_mocked(self):
json.dump({"name": "test", "version": "1.0.0"}, f)
f.flush()
try:
result = self.sdk.dependencies.post([("file", ("package.json", open(f.name, "rb")))], {})
# Pass the file path as a string, not a file object
result = self.sdk.dependencies.post([f.name], {})
self.assertIn("packages", result)
finally:
os.unlink(f.name)
Expand Down
Loading
Loading