Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
34 changes: 30 additions & 4 deletions other/materials_designer/workflows/valence_band_offset.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -9,10 +9,17 @@
"\n",
"Calculate the valence band offset for a labeled interface using a DFT workflow on the Mat3ra platform.\n",
"\n",
"The notebook supports two modes controlled by `IS_POLAR` in cell 1.2:\n",
"\n",
"- `IS_POLAR = False`: use the standard VBO workflow based on extrema extracted from the macroscopically averaged electrostatic potential.\n",
"- `IS_POLAR = True`: append a final Python post-process that fits the electrostatic potential in the left and right slab regions, then uses those fitted averages for the polar-interface VBO evaluation.\n",
"\n",
"When the polar option is enabled, the final results section shows both the scalar VBO value and an additional plot, `Polar VBO Fit`, highlighting the fitted slab regions and linear fits used in the polar correction.\n",
"\n",
"<h2 style=\"color:green\">Usage</h2>\n",
"\n",
"1. Create and save an interface with labels (for example via `create_interface_with_min_strain_zsl.ipynb`).\n",
"1. Set the interface and calculation parameters in cells 1.2 and 1.3 below.\n",
"1. Set the interface and calculation parameters in cells 1.2 and 1.3 below, including `IS_POLAR` if the interface is polar.\n",
"1. Click \"Run\" > \"Run All\" to run all cells.\n",
"1. Wait for the job to complete.\n",
"1. Scroll down to view the VBO result.\n",
Expand All @@ -26,7 +33,7 @@
"1. Configure compute: get list of clusters and create compute configuration with selected cluster, queue, and number of processors.\n",
"1. Create the job with materials and workflow configuration: assemble the job from materials, workflow, project, and compute configuration.\n",
"1. Submit the job and monitor the status: submit the job and wait for completion.\n",
"1. Retrieve results: get and display the valence band offset.\n"
"1. Retrieve results: get and display the valence band offset, average ESP profiles, and the polar fit plot when `IS_POLAR = True`.\n"
]
},
{
Expand Down Expand Up @@ -87,6 +94,8 @@
"RIGHT_SIDE_PART = InterfacePartsEnum.FILM\n",
"INTERFACE_SYSTEM_NAME = None # Used as tag to group the materials. Defaults to shorthand from the loaded interface name\n",
"\n",
"IS_POLAR = False # Whether the interface is polar, to adjust the VBO calculation method accordingly.\n",
"\n",
"# 4. Workflow parameters\n",
"APPLICATION_NAME = \"espresso\"\n",
"WORKFLOW_SEARCH_TERM = \"valence_band_offset.json\"\n",
Expand Down Expand Up @@ -366,10 +375,13 @@
"source": [
"from mat3ra.standata.workflows import WorkflowStandata\n",
"from mat3ra.wode.workflows import Workflow\n",
"from utils.polar_vbo import add_polar_vbo_postprocess\n",
"from utils.visualize import visualize_workflow\n",
"\n",
"workflow_config = WorkflowStandata.filter_by_application(app.name).get_by_name_first_match(WORKFLOW_SEARCH_TERM)\n",
"workflow = Workflow.create(workflow_config)\n",
"if IS_POLAR:\n",
" workflow = add_polar_vbo_postprocess(workflow)\n",
"workflow.name = MY_WORKFLOW_NAME\n",
"\n",
"visualize_workflow(workflow)\n"
Expand Down Expand Up @@ -625,6 +637,7 @@
"outputs": [],
"source": [
"from mat3ra.prode import PropertyName\n",
"from utils.api import attach_signed_url_to_file_property\n",
"from utils.visualize import visualize_properties\n",
"\n",
"job_data = client.jobs.get(job_id)\n",
Expand All @@ -637,13 +650,15 @@
"print(f\"Valence Band Offset (VBO) value: {vbo_value['value']:.3f} eV\")\n",
"\n",
"avg_esp_unit_ids = {}\n",
"polar_file_unit_id = None\n",
"for subworkflow in workflow[\"subworkflows\"]:\n",
" subworkflow_name = subworkflow[\"name\"]\n",
" for unit in subworkflow[\"units\"]:\n",
" result_names = [result[\"name\"] for result in unit.get(\"results\", [])]\n",
" if \"average_potential_profile\" in result_names:\n",
" avg_esp_unit_ids[subworkflow_name] = unit[\"flowchartId\"]\n",
" break\n",
" if IS_POLAR and \"file_content\" in result_names:\n",
" polar_file_unit_id = unit[\"flowchartId\"]\n",
"\n",
"ordered_names = [\n",
" \"BS + Avg ESP (Interface)\",\n",
Expand All @@ -658,7 +673,18 @@
" property_name=\"average_potential_profile\",\n",
" unit_id=unit_id,\n",
" )[0]\n",
" visualize_properties(avg_esp_data, title=subworkflow_name)\n"
" visualize_properties(avg_esp_data, title=subworkflow_name)\n",
"\n",
"if IS_POLAR and polar_file_unit_id is not None:\n",
" polar_file_results = client.properties.get_for_job(\n",
" job_id,\n",
" property_name=\"file_content\",\n",
" unit_id=polar_file_unit_id,\n",
" )\n",
" if polar_file_results:\n",
" polar_file_data = attach_signed_url_to_file_property(client, job_id, polar_file_results[0])\n",
" visualize_properties(polar_file_data, title=\"Polar VBO Fit\")\n",
"\n"
]
},
{
Expand Down
38 changes: 36 additions & 2 deletions utils/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,14 +78,14 @@ def wait_for_jobs_to_finish_async(endpoint: JobEndpoints, job_ids: List[str]) ->
now = datetime.datetime.now().strftime("%Y-%m-%d-%H:%M:%S")
row = [
now,
counts.get("submitted", 0),
counts.get("submitted", 0) + counts.get("queued", 0),
counts.get("active", 0),
counts.get("finished", 0),
counts.get("error", 0),
]
pretty_print([row], headers, tablefmt="grid", stralign="center")

active_statuses = {"pre-submission", "submitted", "active"}
active_statuses = {"pre-submission", "submitted", "queued", "active"}
return not statuses or any(status in active_statuses for status in statuses)


Expand Down Expand Up @@ -200,6 +200,40 @@ def get_properties_for_job(client: APIClient, job_id: str, property_name: Option
return [{**prop, "fermiEnergy": fermi_energy} for prop in properties]


def attach_signed_url_to_file_property(client: "APIClient", job_id: str, file_property: dict) -> dict:
    """
    Enrich a file_content property with a signed URL from the job file listing.

    The input property is not mutated; a shallow copy is returned. A job file
    matches when its storage key equals the property's object name, or when its
    name or key matches the property's basename (exactly, or as a `/`-suffixed
    path component of the key).

    Args:
        client (APIClient): API client instance.
        job_id (str): Job ID.
        file_property (dict): Property record, typically a file_content result.

    Returns:
        dict: Property dict with signedUrl added when a matching job file is found.
    """
    property_with_url = dict(file_property)
    # `objectData` may be absent or explicitly None; `or {}` covers both so the
    # chained .get() cannot raise AttributeError.
    object_name = (property_with_url.get("objectData") or {}).get("NAME")
    basename = property_with_url.get("basename")

    def _matches(job_file: dict) -> bool:
        # Guard each comparison against None: a job file missing the same field
        # must not match a property missing it (None == None would be True),
        # and a None basename must not produce a literal "/None" suffix test.
        key = job_file.get("key")
        if object_name is not None and key == object_name:
            return True
        if basename is not None:
            if job_file.get("name") == basename:
                return True
            if key is not None and (key == basename or key.endswith(f"/{basename}")):
                return True
        return False

    matching_job_file = next((f for f in client.jobs.list_files(job_id) if _matches(f)), None)
    if matching_job_file:
        # Backends vary in capitalization of the signed-URL field.
        property_with_url["signedUrl"] = matching_job_file.get("signedUrl") or matching_job_file.get("signedURL")

    return property_with_url


def create_job(
api_client: APIClient,
materials: List[Union[dict, Material]],
Expand Down
Loading
Loading