Usage Patterns
Basic Client Session
from dalysdk import DalyClient

# Open a client session; the context manager closes the underlying
# HTTP session on exit.
with DalyClient(workspace_api_key="wk_...", user_api_key="uk_...") as client:
    locations = client.locations.list()
    print("locations:", len(locations))
File-Based Library Uploads
# Upload component definition files (.pan module / .ond inverter)
# directly into the workspace library.
with DalyClient(workspace_api_key="wk_...", user_api_key="uk_...") as client:
    module = client.modules.create_from_file("/path/module.pan")
    inverter = client.inverters.create_from_file("/path/inverter.ond")
Queue Energy Model and Poll Task
import time

from dalysdk import DalyClient

# Full energy-model input payload: one block combining a saved location,
# inverter, and module with layout, shading, loss, AC-system, and
# monthly-albedo settings.
payload = {
    "locationId": 1,
    "blocks": [
        {
            "inverterId": 1,
            "moduleId": 1,
            "array": {"stringCount": 10, "stringLength": 28},
            "layout": {"arrayType": "fixed", "gcr": 0.4, "azimuth": 180, "axisTilt": 0, "tilt": 25},
            "shading": {"farShading": False, "nearShading": "unlimited", "electricalImpact": False},
            "losses": {
                "moduleQuality": 0.0,
                "mismatch": 0.02,
                "dcohmic": 0.01,
                "lid": 0.01,
                "stringVoltageMismatch": 0.01,
                "invAuxLoss": True,
                "uC": 25,
                "uV": 1.2,
            },
            "acSystem": {
                "numberOfInverters": 1,
                "losses": {"acohmicLoss": 0.01, "transformer": {"ironLoss": 0.005, "copperLoss": 0.005}},
            },
            "albedo": {"jan": 0.2, "feb": 0.2, "mar": 0.2, "apr": 0.2, "may": 0.2, "jun": 0.2, "jul": 0.2, "aug": 0.2, "sep": 0.2, "oct": 0.2, "nov": 0.2, "dec": 0.2},
        }
    ],
}

with DalyClient(workspace_api_key="wk_...", user_api_key="uk_...") as client:
    # async_mode=True queues the run and returns immediately with a task
    # handle instead of blocking until the model finishes.
    queued = client.energy_models.create(payload, async_mode=True, timeout=120)
    task_id = queued["taskId"]
    model_id = queued["energyModelId"]

    # Poll the task every 2 seconds until it reaches a terminal state.
    while True:
        task = client.tasks.get(task_id)
        status = task.get("status")
        if status in {"completed", "failed", "cancelled"}:
            break
        time.sleep(2)

    # Fetch results only on success.
    if status == "completed":
        result = client.energy_models.get(model_id)
        print(result.keys())
Edit and Rerun a Saved Imported Template
from copy import deepcopy

from dalysdk import DalyClient

with DalyClient(workspace_api_key="wk_...", user_api_key="uk_...") as client:
    # Import a PVsyst project/variant pair; the response lists the
    # energy models created from the imported files.
    imported = client.projects.import_from_files(
        file_paths=["/path/MyProject.PRJ", "/path/Variant.VC"]
    )
    energy_model_id = imported["energyModels"][0]["energyModelId"]

    # Deep-copy the saved inputs before editing so the fetched payload
    # is left untouched.
    saved = client.energy_models.get_with_inputs(energy_model_id)
    edited_inputs = deepcopy(saved["inputs"])
    edited_inputs["output"] = {"name": "Imported Variant - Edited"}
    client.energy_models.update(energy_model_id, edited_inputs)

    # Rerun the saved JSON in place; no new energy-model row is created.
    queued = client.energy_models.run_saved(energy_model_id, async_mode=True)
    print(queued["energyModelId"])  # same row ID
    print(queued["taskDetails"]["taskId"])
run_saved() reruns the currently saved JSON in place. It does not create a
new energy-model row.
Batch Energy Model Runs
Use BatchRunner to submit many runs and wait for all of them to complete without writing your own poll loop. See Batch Runner for the full API reference.
from dalysdk import DalyClient, BatchRunner

# NOTE: epm_input_a/b/c are full energy-model payloads (see the payload
# example above), assumed to be defined elsewhere.
with DalyClient(workspace_api_key="wk_...", user_api_key="uk_...") as client:
    batch = BatchRunner(client)

    # Submit individually or in bulk
    batch.submit("Site A - Tracker", epm_input_a)
    batch.submit_many([
        ("Site B - Fixed", epm_input_b),
        ("Site C - Bifacial", epm_input_c),
    ])

    # Block until all finish (1-hour timeout, poll every 10s)
    all_done = batch.wait(timeout=3600, poll_interval=10)

    # Inspect results
    print(batch.summary())
    for result in batch.results():
        print(result["name"], result["data"]["summary"])
Workflow Inspection
# Fetch a workflow by ID and inspect its overall status plus the
# tasks it contains.
with DalyClient(workspace_api_key="wk_...", user_api_key="uk_...") as client:
    wf = client.workflows.get(123)
    print(wf["workflow"]["status"])
    print(len(wf["tasks"]))
