diff --git a/other/materials_designer/workflows/Introduction.ipynb b/other/materials_designer/workflows/Introduction.ipynb
index 5f02082e..6fa6699d 100644
--- a/other/materials_designer/workflows/Introduction.ipynb
+++ b/other/materials_designer/workflows/Introduction.ipynb
@@ -87,7 +87,7 @@
"## 8. Electronics\n",
"\n",
"### 8.1. Valence Band Offset\n",
- "#### 8.1.1. Valence band offset at an interface. *(to be added)*\n",
+ "#### [8.1.1. Valence band offset at an interface.](valence_band_offset.ipynb)\n",
"\n",
"### 8.2. Dielectric Tensor\n",
"#### 8.2.1. Dielectric tensor calculation. *(to be added)*\n",
@@ -98,6 +98,14 @@
"### 9.1. Python / Shell\n",
"#### 9.1.1. Custom Python and Shell workflows. *(to be added)*\n"
]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "1",
+ "metadata": {},
+ "outputs": [],
+ "source": []
}
],
"metadata": {
diff --git a/other/materials_designer/workflows/valence_band_offset.ipynb b/other/materials_designer/workflows/valence_band_offset.ipynb
new file mode 100644
index 00000000..09858eeb
--- /dev/null
+++ b/other/materials_designer/workflows/valence_band_offset.ipynb
@@ -0,0 +1,686 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "0",
+ "metadata": {},
+ "source": [
+ "# Valence Band Offset (VBO)\n",
+ "\n",
+ "Calculate the valence band offset for a labeled interface using a DFT workflow on the Mat3ra platform.\n",
+ "\n",
+ "## Usage\n",
+ "\n",
+ "1. Create and save an interface with labels (for example via `create_interface_with_min_strain_zsl.ipynb`).\n",
+ "1. Set the interface and calculation parameters in cells 1.2 and 1.3 below.\n",
+ "1. Click \"Run\" > \"Run All\" to run all cells.\n",
+ "1. Wait for the job to complete.\n",
+ "1. Scroll down to view the VBO result.\n",
+ "\n",
+ "## Summary\n",
+ "\n",
+ "1. Set up the environment and parameters: install packages (JupyterLite only) and configure parameters for the interface, workflow, compute resources, and job.\n",
+ "1. Authenticate and initialize API client: authenticate via browser, initialize the client, then select account and project.\n",
+ "1. Create materials: load an interface from the `../uploads` folder, split it into interface/left/right parts using interface labels, strip the labels (which are not supported by Quantum ESPRESSO), and save all three materials to the platform tagged with the interface name.\n",
+ "1. Configure workflow: select application, load the VBO workflow from Standata, assign materials to subworkflows by role, set model and computational parameters, and preview the workflow.\n",
+ "1. Configure compute: get list of clusters and create compute configuration with selected cluster, queue, and number of processors.\n",
+ "1. Create the job with materials and workflow configuration: assemble the job from materials, workflow, project, and compute configuration.\n",
+ "1. Submit the job and monitor the status: submit the job and wait for completion.\n",
+ "1. Retrieve results: get and display the valence band offset.\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "1",
+ "metadata": {},
+ "source": [
+ "## 1. Set up the environment and parameters\n",
+ "### 1.1. Install packages (JupyterLite)\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "2",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import sys\n",
+ "\n",
+ "if sys.platform == \"emscripten\":\n",
+ " import micropip\n",
+ "\n",
+ " await micropip.install(\"mat3ra-api-examples\", deps=False)\n",
+ " await micropip.install(\"mat3ra-utils\")\n",
+ " from mat3ra.utils.jupyterlite.packages import install_packages\n",
+ "\n",
+ " await install_packages(\"api_examples\")\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "3",
+ "metadata": {},
+ "source": [
+ "### 1.2. Set parameters\n",
+ "Set INTERFACE_NAME to match the name of the structure in the \"uploads\" folder. The left and right parts will be extracted based on the substrate/film labels present in the generated interface."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "4",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from datetime import datetime\n",
+ "from mat3ra.ide.compute import QueueName\n",
+ "from mat3ra.made.tools.convert.interface_parts_enum import InterfacePartsEnum\n",
+ "\n",
+ "# 2. Auth and organization parameters\n",
+ "ORGANIZATION_NAME = None\n",
+ "\n",
+ "# 3. Material parameters\n",
+ "FOLDER = \"../uploads\"\n",
+ "INTERFACE_NAME = \"Interface\" # To search for in \"uploads\" folder\n",
+ "LEFT_SIDE_PART = InterfacePartsEnum.SUBSTRATE\n",
+ "RIGHT_SIDE_PART = InterfacePartsEnum.FILM\n",
+ "INTERFACE_SYSTEM_NAME = None # Used as tag to group the materials. Defaults to shorthand from the loaded interface name\n",
+ "\n",
+ "# 4. Workflow parameters\n",
+ "APPLICATION_NAME = \"espresso\"\n",
+ "WORKFLOW_SEARCH_TERM = \"valence_band_offset.json\"\n",
+ "MY_WORKFLOW_NAME = \"VBO\"\n",
+ "\n",
+ "# 5. Compute parameters\n",
+ "CLUSTER_NAME = None\n",
+ "QUEUE_NAME = QueueName.D\n",
+ "PPN = 1\n",
+ "\n",
+ "# 6. Job parameters\n",
+ "timestamp = datetime.now().strftime(\"%Y-%m-%d %H:%M\")\n",
+ "POLL_INTERVAL = 60 # seconds"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "5",
+ "metadata": {},
+ "source": [
+ "### 1.3. Set specific VBO parameters\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "6",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Method parameters\n",
+ "PSEUDOPOTENTIAL_TYPE = \"us\" # \"us\" (ultrasoft), \"nc\" (norm-conserving), \"paw\"\n",
+ "FUNCTIONAL = \"pbe\" # for gga: \"pbe\", \"pbesol\"; for lda: \"pz\"\n",
+ "MODEL_SUBTYPE = \"gga\"\n",
+ "\n",
+ "# K-grid and k-path\n",
+ "SCF_KGRID = None # e.g. [8, 8, 1]\n",
+ "KPATH = None # e.g. [{\"point\": \"G\", \"steps\": 20}, {\"point\": \"M\", \"steps\": 20}]\n",
+ "\n",
+ "# SCF diagonalization and mixing\n",
+ "DIAGONALIZATION = \"david\" # \"david\" or \"cg\"\n",
+ "MIXING_BETA = 0.3\n",
+ "\n",
+ "# Energy cutoffs\n",
+ "ECUTWFC = 40\n",
+ "ECUTRHO = 200\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "7",
+ "metadata": {},
+ "source": [
+ "## 2. Authenticate and initialize API client\n",
+ "### 2.1. Authenticate\n",
+ "Authenticate in the browser and have credentials stored in environment variable \"OIDC_ACCESS_TOKEN\".\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "8",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from utils.auth import authenticate\n",
+ "\n",
+ "await authenticate()\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "9",
+ "metadata": {},
+ "source": [
+ "### 2.2. Initialize API client\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "10",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from mat3ra.api_client import APIClient\n",
+ "\n",
+ "client = APIClient.authenticate()\n",
+ "client\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "11",
+ "metadata": {},
+ "source": [
+ "### 2.3. Select account\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "12",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "client.list_accounts()\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "13",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "selected_account = client.my_account\n",
+ "\n",
+ "if ORGANIZATION_NAME:\n",
+ " selected_account = client.get_account(name=ORGANIZATION_NAME)\n",
+ "\n",
+ "ACCOUNT_ID = selected_account.id\n",
+ "print(f\"✅ Selected account ID: {ACCOUNT_ID}, name: {selected_account.name}\")\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "14",
+ "metadata": {},
+ "source": [
+ "### 2.4. Select project\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "15",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "projects = client.projects.list({\"isDefault\": True, \"owner._id\": ACCOUNT_ID})\n",
+ "project_id = projects[0][\"_id\"]\n",
+ "print(f\"✅ Using project: {projects[0]['name']} ({project_id})\")\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "16",
+ "metadata": {},
+ "source": [
+ "## 3. Create materials\n",
+ "### 3.1. Load interface from local folder\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "17",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from utils.jupyterlite import load_material_from_folder\n",
+ "from utils.visualize import visualize_materials as visualize\n",
+ "\n",
+ "interface = load_material_from_folder(FOLDER, INTERFACE_NAME)\n",
+ "\n",
+ "visualize(interface, repetitions=[1, 1, 1])\n",
+ "visualize(interface, repetitions=[1, 1, 1], rotation=\"-90x\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "18",
+ "metadata": {},
+ "source": [
+ "### 3.2. Create materials from interface parts\n",
+ "Slabs are isolated based on labels, then labels removed as they are not compatible with Quantum ESPRESSO. The three materials (interface, left slab, right slab) are named and visualized."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "19",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import re\n",
+ "from mat3ra.made.tools.modify import interface_get_part\n",
+ "\n",
+ "interface_shorthand = re.match(r\"^(.+?)\\s\", interface.name).group(1) if re.match(r\"^(.+?)\\s\",\n",
+ " interface.name) else INTERFACE_NAME\n",
+ "interface_system_name = INTERFACE_SYSTEM_NAME or interface_shorthand\n",
+ "left_material = interface_get_part(interface, part=LEFT_SIDE_PART)\n",
+ "right_material = interface_get_part(interface, part=RIGHT_SIDE_PART)\n",
+ "interface_material = interface.clone()\n",
+ "\n",
+ "left_material.basis.set_labels_from_list([])\n",
+ "right_material.basis.set_labels_from_list([])\n",
+ "interface_material.basis.set_labels_from_list([])\n",
+ "\n",
+ "interface_material.name = f\"{interface_system_name} Interface\"\n",
+ "left_material.name = f\"{interface_system_name} Left\"\n",
+ "right_material.name = f\"{interface_system_name} Right\"\n",
+ "\n",
+ "materials_by_role = {\"interface\": interface_material, \"substrate\": left_material, \"film\": right_material}\n",
+ "for role, material in materials_by_role.items():\n",
+ " print(f\" {role}: {material.name}\")\n",
+ "visualize(list(materials_by_role.values()), repetitions=[1, 1, 1], rotation=\"-90x\")\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "20",
+ "metadata": {},
+ "source": [
+ "### 3.3. Save materials\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "21",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from mat3ra.made.material import Material\n",
+ "\n",
+ "saved_materials = {}\n",
+ "for role, material in materials_by_role.items():\n",
+ " material_config = material.to_dict()\n",
+ " material_config[\"name\"] = material.name\n",
+ " existing_tags = material_config.get(\"tags\") or []\n",
+ " material_config[\"tags\"] = sorted(set([*existing_tags, interface_system_name]))\n",
+ " saved = Material.create(client.materials.create(material_config, owner_id=ACCOUNT_ID))\n",
+ " saved_materials[role] = saved\n",
+ " print(f\" {role}: {saved.name} ({saved.id}) | tags={material_config['tags']}\")\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "22",
+ "metadata": {},
+ "source": [
+ "## 4. Configure workflow\n",
+ "### 4.1. Select application\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "23",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from mat3ra.ade.application import Application\n",
+ "from mat3ra.standata.applications import ApplicationStandata\n",
+ "\n",
+ "app_config = ApplicationStandata.get_by_name_first_match(APPLICATION_NAME)\n",
+ "app = Application(**app_config)\n",
+ "print(f\"Using application: {app.name}\")\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "24",
+ "metadata": {},
+ "source": [
+ "### 4.2. Load workflow from Standata and preview it\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "25",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from mat3ra.standata.workflows import WorkflowStandata\n",
+ "from mat3ra.wode.workflows import Workflow\n",
+ "from utils.visualize import visualize_workflow\n",
+ "\n",
+ "workflow_config = WorkflowStandata.filter_by_application(app.name).get_by_name_first_match(WORKFLOW_SEARCH_TERM)\n",
+ "workflow = Workflow.create(workflow_config)\n",
+ "workflow.name = MY_WORKFLOW_NAME\n",
+ "\n",
+ "visualize_workflow(workflow)\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "26",
+ "metadata": {},
+ "source": [
+ "### 4.3. Set model and its parameters (physics)\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "27",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from mat3ra.mode.model import Model\n",
+ "from mat3ra.standata.model_tree import ModelTreeStandata\n",
+ "\n",
+ "model_config = ModelTreeStandata.get_model_by_parameters(\n",
+ " type=\"dft\", subtype=MODEL_SUBTYPE, functional=FUNCTIONAL\n",
+ ")\n",
+ "model_config[\"method\"] = {\"type\": \"pseudopotential\", \"subtype\": PSEUDOPOTENTIAL_TYPE}\n",
+ "model = Model.create(model_config)\n",
+ "\n",
+ "for subworkflow in workflow.subworkflows:\n",
+ " if subworkflow.application.name == APPLICATION_NAME:\n",
+ " subworkflow.model = model\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "28",
+ "metadata": {},
+ "source": [
+ "### 4.4. Modify method (computational parameters): k-grid, k-path, cutoffs, diagonalization, and mixing\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "29",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from mat3ra.wode.context.providers import (\n",
+ " PlanewaveCutoffsContextProvider,\n",
+ " PointsGridDataProvider,\n",
+ " PointsPathDataProvider,\n",
+ ")\n",
+ "\n",
+ "\n",
+ "def set_pw_electrons_parameters(unit, diagonalization, mixing_beta):\n",
+ "    # Patch the pw.x ELECTRONS namelist in-place, then sync 'rendered' so edits take effect.\n",
+ "    unit.replace_in_input_content(r\"diagonalization\\s*=\\s*'[^']*'\", f\"diagonalization = '{diagonalization}'\")\n",
+ "    unit.replace_in_input_content(r\"mixing_beta\\s*=\\s*[-+0-9.eE]+\", f\"mixing_beta = {mixing_beta}\")\n",
+ "    for input_entry in unit.input:  # 'input_entry' avoids shadowing the built-in input()\n",
+ "        if isinstance(input_entry, dict) and \"content\" in input_entry:\n",
+ "            input_entry[\"rendered\"] = input_entry[\"content\"]\n",
+ "\n",
+ "\n",
+ "for subworkflow in workflow.subworkflows:\n",
+ " if subworkflow.application.name != APPLICATION_NAME:\n",
+ " continue\n",
+ "\n",
+ " unit_names = [unit.name for unit in subworkflow.units]\n",
+ "\n",
+ " if SCF_KGRID is not None and \"pw_scf\" in unit_names:\n",
+ " unit = subworkflow.get_unit_by_name(name=\"pw_scf\")\n",
+ " unit.add_context(PointsGridDataProvider(dimensions=SCF_KGRID, isEdited=True).yield_data())\n",
+ " subworkflow.set_unit(unit)\n",
+ "\n",
+ " if KPATH is not None and \"pw_bands\" in unit_names:\n",
+ " unit = subworkflow.get_unit_by_name(name=\"pw_bands\")\n",
+ " unit.add_context(PointsPathDataProvider(path=KPATH, isEdited=True).yield_data())\n",
+ " subworkflow.set_unit(unit)\n",
+ "\n",
+ " cutoffs_context = PlanewaveCutoffsContextProvider(\n",
+ " wavefunction=ECUTWFC, density=ECUTRHO, isEdited=True\n",
+ " ).yield_data()\n",
+ " for unit_name in [\"pw_scf\", \"pw_bands\"]:\n",
+ " if unit_name not in unit_names:\n",
+ " continue\n",
+ " unit = subworkflow.get_unit_by_name(name=unit_name)\n",
+ " unit.add_context(cutoffs_context)\n",
+ " unit = set_pw_electrons_parameters(unit, DIAGONALIZATION, MIXING_BETA)\n",
+ " subworkflow.set_unit(unit)\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "30",
+ "metadata": {},
+ "source": [
+ "### 4.5. Preview final workflow\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "31",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "visualize_workflow(workflow)\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "32",
+ "metadata": {},
+ "source": [
+ "## 5. Create the compute configuration\n",
+ "### 5.1. Get list of clusters\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "33",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "clusters = client.clusters.list()\n",
+ "print(f\"Available clusters: {[c['hostname'] for c in clusters]}\")\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "34",
+ "metadata": {},
+ "source": [
+ "### 5.2. Create compute configuration\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "35",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from mat3ra.ide.compute import Compute\n",
+ "\n",
+ "if CLUSTER_NAME:\n",
+ " cluster = next((c for c in clusters if CLUSTER_NAME in c[\"hostname\"]), None)\n",
+ "else:\n",
+ " cluster = clusters[0]\n",
+ "\n",
+ "compute = Compute(\n",
+ " cluster=cluster,\n",
+ " queue=QUEUE_NAME,\n",
+ " ppn=PPN\n",
+ ")\n",
+ "print(f\"Using cluster: {compute.cluster.hostname}, queue: {QUEUE_NAME}, ppn: {PPN}\")\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "36",
+ "metadata": {},
+ "source": [
+ "## 6. Create the job\n",
+ "### 6.1. Create job\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "37",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from utils.api import create_job\n",
+ "from utils.generic import dict_to_namespace\n",
+ "from utils.visualize import display_JSON\n",
+ "\n",
+ "materials = list(saved_materials.values())\n",
+ "\n",
+ "job_name = f\"{MY_WORKFLOW_NAME} {interface_system_name} {timestamp}\"\n",
+ "workflow.name = job_name\n",
+ "\n",
+ "print(f\"Materials: {[m.id for m in materials]}\")\n",
+ "print(f\"Project: {project_id}\")\n",
+ "\n",
+ "job_response = create_job(\n",
+ " api_client=client,\n",
+ " materials=materials,\n",
+ " workflow=workflow,\n",
+ " project_id=project_id,\n",
+ " owner_id=ACCOUNT_ID,\n",
+ " prefix=job_name,\n",
+ " compute=compute.to_dict(),\n",
+ ")\n",
+ "\n",
+ "job = dict_to_namespace(job_response)\n",
+ "job_id = job._id\n",
+ "print(\"✅ Job created successfully!\")\n",
+ "print(f\"Job ID: {job_id}\")\n",
+ "\n",
+ "display_JSON(job_response)\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "38",
+ "metadata": {},
+ "source": [
+ "## 7. Submit the job and monitor the status\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "39",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "client.jobs.submit(job_id)\n",
+ "print(f\"✅ Job {job_id} submitted successfully!\")\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "40",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from utils.api import wait_for_jobs_to_finish_async\n",
+ "\n",
+ "await wait_for_jobs_to_finish_async(client.jobs, [job_id], poll_interval=POLL_INTERVAL)\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "41",
+ "metadata": {},
+ "source": [
+ "## 8. Retrieve and visualize results\n",
+ "### 8.1. Valence Band Offset and average ESP profiles\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "42",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from mat3ra.prode import PropertyName\n",
+ "from utils.visualize import visualize_properties\n",
+ "\n",
+ "job_data = client.jobs.get(job_id)\n",
+ "workflow = job_data[\"workflow\"]\n",
+ "\n",
+ "vbo_value = client.properties.get_for_job(\n",
+ " job_id,\n",
+ " property_name=PropertyName.scalar.valence_band_offset.value,\n",
+ ")[0]\n",
+ "print(f\"Valence Band Offset (VBO) value: {vbo_value['value']:.3f} eV\")\n",
+ "\n",
+ "avg_esp_unit_ids = {}\n",
+ "for subworkflow in workflow[\"subworkflows\"]:\n",
+ " subworkflow_name = subworkflow[\"name\"]\n",
+ " for unit in subworkflow[\"units\"]:\n",
+ " result_names = [result[\"name\"] for result in unit.get(\"results\", [])]\n",
+ " if \"average_potential_profile\" in result_names:\n",
+ " avg_esp_unit_ids[subworkflow_name] = unit[\"flowchartId\"]\n",
+ " break\n",
+ "\n",
+ "ordered_names = [\n",
+ " \"BS + Avg ESP (Interface)\",\n",
+ " \"BS + Avg ESP (interface left)\",\n",
+ " \"BS + Avg ESP (interface right)\",\n",
+ "]\n",
+ "\n",
+ "for subworkflow_name in ordered_names:\n",
+ " unit_id = avg_esp_unit_ids[subworkflow_name]\n",
+ " avg_esp_data = client.properties.get_for_job(\n",
+ " job_id,\n",
+ " property_name=\"average_potential_profile\",\n",
+ " unit_id=unit_id,\n",
+ " )[0]\n",
+ " visualize_properties(avg_esp_data, title=subworkflow_name)\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "43",
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "name": "python",
+ "version": "3.11.2"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/utils/api.py b/utils/api.py
index 3fcfffbd..b2959beb 100644
--- a/utils/api.py
+++ b/utils/api.py
@@ -238,7 +238,7 @@ def create_job(
# Strip _id so the server uses the embedded workflow as-is instead of fetching from DB,
# which would discard any unit-level context (kpath, kgrid, cutoffs, etc.).
job_workflow_dict.pop("_id", None)
- is_multimaterial = job_workflow_dict.get("isMultimaterial", False)
+ is_multimaterial = job_workflow_dict.get("isMultiMaterial", False)
config = {
"_project": {"_id": project_id},
@@ -248,7 +248,11 @@ def create_job(
}
if is_multimaterial:
- config["_materials"] = [{"_id": mid} for mid in {md["_id"] for md in material_dicts}]
+ # Some API environments still validate `_material._id` even for
+ # multi-material workflows, so provide the first material as a
+ # compatibility fallback while preserving the full ordered list.
+ config["_material"] = {"_id": material_dicts[0]["_id"]}
+ config["_materials"] = [{"_id": m["_id"]} for m in material_dicts]
else:
config["_material"] = {"_id": material_dicts[0]["_id"]}