Skip to content
Open
7 changes: 5 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ A Python client library for Microsoft Dataverse that provides a unified interfac
## Key features

- **🔄 CRUD Operations**: Create, read, update, and delete records with support for bulk operations and automatic retry
- **⚡ True Bulk Operations**: Automatically uses Dataverse's native `CreateMultiple`, `UpdateMultiple`, and `BulkDelete` Web API operations for maximum performance and transactional integrity
- **⚡ True Bulk Operations**: Automatically uses Dataverse's native `CreateMultiple`, `UpdateMultiple`, `DeleteMultiple` (elastic tables only), and `BulkDelete` Web API operations for maximum performance and transactional integrity
- **📊 SQL Queries**: Execute read-only SQL queries via the Dataverse Web API `?sql=` parameter
- **🏗️ Table Management**: Create, inspect, and delete custom tables and columns programmatically
- **📎 File Operations**: Upload files to Dataverse file columns with automatic chunking for large files
Expand Down Expand Up @@ -161,7 +161,10 @@ ids = client.create("account", payloads)
client.update("account", ids, {"industry": "Technology"})

# Delete multiple records synchronously
client.delete("account", ids, use_bulk_delete=True)
client.delete("account", ids)

# Queue an asynchronous BulkDelete job and get its job id
client.delete_async("account", ids)
```

### Query data
Expand Down
18 changes: 13 additions & 5 deletions examples/advanced/walkthrough.py
Original file line number Diff line number Diff line change
Expand Up @@ -281,11 +281,19 @@ def main():
client.delete(table_name, id1)
print(f"✓ Deleted single record: {id1}")

# Multiple delete (delete the paging demo records)
log_call(f"client.delete('{table_name}', [{len(paging_ids)} IDs])")
job_id = client.delete(table_name, paging_ids)
print(f"✓ Bulk delete job started: {job_id}")
print(f" (Deleting {len(paging_ids)} paging demo records)")
# Multiple delete (demonstrate async bulk job and synchronous fallback)
midpoint = len(paging_ids) // 2
async_ids = paging_ids[:midpoint]
sync_ids = paging_ids[midpoint:]

log_call(f"client.delete_async('{table_name}', [{len(async_ids)} IDs])")
job_id = client.delete_async(table_name, async_ids)
print(f"✓ Bulk delete job queued: {job_id}")
print(f" (Deleting {len(async_ids)} paging demo records asynchronously)")

log_call(f"client.delete('{table_name}', [{len(sync_ids)} IDs])")
client.delete(table_name, sync_ids)
print(f"✓ Synchronously deleted {len(sync_ids)} paging demo records")

# ============================================================================
# 11. CLEANUP
Expand Down
48 changes: 43 additions & 5 deletions src/PowerPlatform/Dataverse/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -205,8 +205,7 @@ def delete(
self,
table_schema_name: str,
ids: Union[str, List[str]],
use_bulk_delete: bool = True,
) -> Optional[str]:
) -> None:
"""
Delete one or more records by GUID.

Expand All @@ -231,7 +230,7 @@ def delete(

Delete multiple records::

job_id = client.delete("account", [id1, id2, id3])
client.delete("account", [id1, id2, id3])
"""
od = self._get_odata()
if isinstance(ids, str):
Expand All @@ -243,12 +242,51 @@ def delete(
return None
if not all(isinstance(rid, str) for rid in ids):
raise TypeError("ids must contain string GUIDs")
if use_bulk_delete:
return od._delete_multiple(table_schema_name, ids)
if od._is_elastic_table(table_schema_name):
od._delete_multiple(table_schema_name, ids)
return None
for rid in ids:
od._delete(table_schema_name, rid)
return None

def delete_async(
    self,
    table_schema_name: str,
    ids: Union[str, List[str]],
) -> str:
    """
    Issue an asynchronous BulkDelete job for one or more records.

    :param table_schema_name: Schema name of the table (e.g. ``"account"`` or ``"new_MyTestTable"``).
    :type table_schema_name: ``str``
    :param ids: Single GUID string or list of GUID strings to delete.
    :type ids: ``str`` or ``list[str]``

    :raises TypeError: If ``ids`` is not str or list[str], or a list contains non-string items.
    :raises HttpError: If the BulkDelete request fails.

    :return: BulkDelete job identifier; the all-zero GUID when there is nothing to delete.
    :rtype: str

    Example:
        Queue a bulk delete::

            job_id = client.delete_async("account", [id1, id2, id3])
    """
    # Normalize to a list so the single-GUID and multi-GUID paths share the
    # same validation and sanitization (previously the str path skipped both,
    # letting an empty/whitespace GUID reach the service).
    if isinstance(ids, str):
        ids = [ids]
    elif not isinstance(ids, list):
        raise TypeError("ids must be str or list[str]")
    if not all(isinstance(rid, str) for rid in ids):
        raise TypeError("ids must contain string GUIDs")
    # Drop empty / whitespace-only entries; they would produce invalid targets.
    sanitized = [rid.strip() for rid in ids if rid.strip()]
    if not sanitized:
        # Nothing to delete — return a stable no-op job id without a service call.
        return "00000000-0000-0000-0000-000000000000"
    # Validate input BEFORE acquiring the OData client so bad arguments fail
    # fast with TypeError rather than an unrelated connection error.
    od = self._get_odata()
    return od._delete_async(table_schema_name, sanitized)

def get(
self,
table_schema_name: str,
Expand Down
78 changes: 68 additions & 10 deletions src/PowerPlatform/Dataverse/data/odata.py
Original file line number Diff line number Diff line change
Expand Up @@ -104,6 +104,8 @@ def __init__(
self._logical_to_entityset_cache: dict[str, str] = {}
# Cache: normalized table_schema_name (lowercase) -> primary id attribute (e.g. accountid)
self._logical_primaryid_cache: dict[str, str] = {}
# Cache: logical name -> whether the table is elastic
self._elastic_table_cache: dict[str, bool] = {}
# Picklist label cache: (normalized_table_schema_name, normalized_attribute) -> {'map': {...}, 'ts': epoch_seconds}
self._picklist_label_cache = {}
self._picklist_cache_ttl_seconds = 3600 # 1 hour TTL
Expand Down Expand Up @@ -334,25 +336,47 @@ def _update_by_ids(self, table_schema_name: str, ids: List[str], changes: Union[
self._update_multiple(entity_set, table_schema_name, batch)
return None

def _delete_multiple(
def _delete_multiple(self, table_schema_name: str, ids: List[str]) -> None:
    """Delete records using the collection-bound ``DeleteMultiple`` action.

    :param table_schema_name: Schema name of the table.
    :type table_schema_name: ``str``
    :param ids: GUIDs for the records to remove.
    :type ids: ``list[str]``
    :return: ``None``; the service does not return a representation.
    :rtype: ``None``
    """
    # Avoid a round trip (and a likely service error) for an empty Targets array.
    if not ids:
        return None
    entity_set = self._entity_set_from_schema_name(table_schema_name)
    pk_attr = self._primary_id_attr(table_schema_name)
    logical_name = table_schema_name.lower()
    # Each target carries the entity's OData type annotation plus its
    # primary-key attribute identifying the record to delete.
    targets: List[Dict[str, Any]] = [
        {
            "@odata.type": f"Microsoft.Dynamics.CRM.{logical_name}",
            pk_attr: rid,
        }
        for rid in ids
    ]
    payload = {"Targets": targets}
    url = f"{self.api}/{entity_set}/Microsoft.Dynamics.CRM.DeleteMultiple"
    self._request("post", url, json=payload)
    return None

def _delete_async(
self,
table_schema_name: str,
ids: List[str],
) -> Optional[str]:
) -> str:
"""Delete many records by GUID list via the ``BulkDelete`` action.

:param logical_name: Logical (singular) entity name.
:type logical_name: ``str``
:param table_schema_name: Schema name of the table.
:type table_schema_name: ``str``
:param ids: GUIDs of records to delete.
:type ids: ``list[str]``

:return: BulkDelete asynchronous job identifier when executed in bulk; ``None`` if no IDs provided or single deletes performed.
:rtype: ``str`` | ``None``
"""
targets = [rid for rid in ids if rid]
if not targets:
return None
value_objects = [{"Value": rid, "Type": "System.Guid"} for rid in targets]
noop_job_id = "00000000-0000-0000-0000-000000000000"
value_objects = [{"Value": rid, "Type": "System.Guid"} for rid in ids]

pk_attr = self._primary_id_attr(table_schema_name)
timestamp = datetime.now(timezone.utc).isoformat(timespec="seconds").replace("+00:00", "Z")
Expand Down Expand Up @@ -396,15 +420,16 @@ def _delete_multiple(
url = f"{self.api}/BulkDelete"
response = self._request("post", url, json=payload, expected=(200, 202, 204))

job_id = None
try:
body = response.json() if response.text else {}
except ValueError:
body = {}
if isinstance(body, dict):
job_id = body.get("JobId")
if isinstance(job_id, str) and job_id.strip():
return job_id

return job_id
return noop_job_id

def _format_key(self, key: str) -> str:
k = key.strip()
Expand Down Expand Up @@ -709,6 +734,39 @@ def _entity_set_from_schema_name(self, table_schema_name: str) -> str:
self._logical_primaryid_cache[cache_key] = primary_id_attr
return es

def _is_elastic_table(self, table_schema_name: str) -> bool:
    """Return ``True`` when the target table is elastic."""
    if not table_schema_name:
        raise ValueError("table schema name required")

    logical_name = table_schema_name.lower()
    # Serve repeat lookups from the per-instance cache.
    cached_value = self._elastic_table_cache.get(logical_name)
    if cached_value is not None:
        return cached_value

    # Query entity metadata for the table's TableType.
    metadata_url = f"{self.api}/EntityDefinitions"
    escaped_name = self._escape_odata_quotes(logical_name)
    query = {
        "$select": "LogicalName,TableType",
        "$filter": f"LogicalName eq '{escaped_name}'",
    }
    response = self._request("get", metadata_url, params=query)

    # Tolerate malformed or unexpected response bodies: treat them as "no match".
    rows = []
    try:
        parsed = response.json()
    except ValueError:
        parsed = None
    if isinstance(parsed, dict):
        rows = parsed.get("value", [])

    result = False
    if rows and isinstance(rows[0], dict):
        declared_type = rows[0].get("TableType")
        result = isinstance(declared_type, str) and declared_type.strip().lower() == "elastic"

    # Cache the verdict (including negatives) to avoid repeated metadata calls.
    self._elastic_table_cache[logical_name] = result
    return result

# ---------------------- Table metadata helpers ----------------------
def _label(self, text: str) -> Dict[str, Any]:
lang = int(self.config.language_code)
Expand Down
Loading